// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/arm64/code-stubs-arm64.h"
#include "src/arm64/frames-arm64.h"
#include "src/arm64/macro-assembler-arm64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    if (patch_site_.is_bound()) {
      DCHECK(info_emitted_);
    } else {
      DCHECK(reg_.IsNone());
    }
  }

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    DCHECK(!reg.Is(csp));
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbz(xzr, 0, target);   // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    DCHECK(!reg.Is(csp));
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbnz(xzr, 0, target);  // Never taken before patched.
  }

  void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
    UseScratchRegisterScope temps(masm_);
    Register temp = temps.AcquireX();
    __ Orr(temp, reg1, reg2);
    EmitJumpIfNotSmi(temp, target);
  }

  void EmitPatchInfo() {
    Assembler::BlockPoolsScope scope(masm_);
    InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
#ifdef DEBUG
    info_emitted_ = true;
#endif
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
  Register reg_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
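
// Illustrative note (added; not part of the original source): the smi checks
// above are emitted as a single branch on xzr, which PatchInlinedSmiCode (in
// ic-arm64.cc) can later rewrite in place to test the tag bit of the recorded
// register. Conceptually:
//
//   EmitJumpIfNotSmi emits:   tbz xzr, #0, target   // always taken
//   assumed patched form:     tbnz reg, #0, target  // taken iff not a smi
//
// The InstructionAccurateScope(masm_, 1) guarantees each patch site is
// exactly one instruction wide, so the patcher can swap the instruction
// without moving surrounding code. The exact patched encodings are an
// assumption here; InlineSmiCheckInfo holds the authoritative details.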


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   - x1: the JS function object being called (i.e. ourselves).
//   - x3: the new target value.
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds a JS frame. See JavaScriptFrameConstants in
// frames-arm64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ Function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kXRegSize;
    __ Peek(x10, receiver_offset);
    __ AssertNotSmi(x10);
    __ CompareObjectType(x10, x10, x11, FIRST_JS_RECEIVER_TYPE);
    __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.
  // The MANUAL indicates that the scope shouldn't actually generate code
  // to set up the frame because we do it manually below.
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  // This call emits the following sequence in a way that can be patched for
  // code ageing support:
  //   Push(lr, fp, cp, x1);
  //   Add(fp, jssp, 2 * kPointerSize);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  // Reserve space on the stack for locals.
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        DCHECK(jssp.Is(__ StackPointer()));
        __ Sub(x10, jssp, locals_count * kPointerSize);
        __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
        __ B(hs, &ok);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ Bind(&ok);
      }
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      if (FLAG_optimize_for_size) {
        __ PushMultipleTimes(x10, locals_count);
      } else {
        const int kMaxPushes = 32;
        if (locals_count >= kMaxPushes) {
          int loop_iterations = locals_count / kMaxPushes;
          __ Mov(x2, loop_iterations);
          Label loop_header;
          __ Bind(&loop_header);
          // Do pushes.
          __ PushMultipleTimes(x10, kMaxPushes);
          __ Subs(x2, x2, 1);
          __ B(ne, &loop_header);
        }
        int remaining = locals_count % kMaxPushes;
        // Emit the remaining pushes.
        __ PushMultipleTimes(x10, remaining);
      }
    }
  }

  bool function_in_register_x1 = true;

  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in x1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ Mov(x10, Operand(info->scope()->GetScopeInfo(info->isolate())));
      __ Push(x1, x10);
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ Push(x3);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ Push(x1);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ Pop(x3);  // Restore new target.
      }
    }
    function_in_register_x1 = false;
    // Context is returned in x0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ Mov(cp, x0);
    __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ Ldr(x10, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ Str(x10, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, static_cast<int>(target.offset()), x10,
                                    x11, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and new target are both trashed in
  // case we bail out here. But since that can happen only when new target is
  // not used and we allocate a context, the value of |function_in_register_x1|
  // is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this-function, which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_x1) {
      __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again, keep it marked as such.
    }
    SetVar(this_function_var, x1, x0, x2);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, x3, x0, x2);
  }

  // Possibly allocate a rest parameter array.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_x1) {
      __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_x1 = false;
    SetVar(rest_param, x0, x1, x2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_x1) {
      // Load this again, if it's used by the local context below.
      __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(x1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, x0, x1, x2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    DCHECK(jssp.Is(__ StackPointer()));
    __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
    __ B(hs, &ok);
    PredictableCodeSizeScope predictable(masm_,
                                         Assembler::kCallSizeWithRelocation);
    __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ Bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emission of the pools, so they don't get emitted in the middle
  // of the back edge table.
  masm()->CheckVeneerPool(true, false);
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ Mov(x0, Smi::FromInt(0));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Mov(x2, Operand(profiling_counter_));
  __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
  __ Subs(x3, x3, Smi::FromInt(delta));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}
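
// Illustrative note (added; not part of the original source): the Subs above
// both writes the decremented counter back to the cell and sets the condition
// flags, so callers can branch on the result without a separate compare:
//
//   EmitProfilingCounterDecrement(weight);
//   __ B(pl, &ok);  // Budget not yet exhausted: skip the interrupt check.
//
// When the smi counter goes negative, control falls through to a call to the
// InterruptCheck builtin, after which EmitProfilingCounterReset() restores
// the budget. This is exactly how the callers below use it.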


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ Mov(x2, Operand(profiling_counter_));
  __ Mov(x3, Smi::FromInt(reset_value));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  DCHECK(jssp.Is(__ StackPointer()));
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockPoolsScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
  // to reduce the absolute error due to the integer division. To do that,
  // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
  // the result).
  int distance =
      static_cast<int>(masm_->SizeOfCodeGeneratedSince(back_edge_target) +
                       kCodeSizeMultiplier / 2);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ B(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ Bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}
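
// Worked example (added; not part of the original source): with the rounding
// above, a back edge whose loop body spans `distance` bytes decrements the
// counter by distance / kCodeSizeMultiplier rounded to nearest, clamped to
// [1, kMaxBackEdgeWeight]. For instance, assuming kCodeSizeMultiplier were
// 149, a 1000-byte loop body would give weight = (1000 + 74) / 149 = 7, so
// small, hot loops drain the interrupt budget more slowly per iteration than
// bulky ones. The multiplier value is architecture-specific; 149 is only an
// assumed figure for illustration.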

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ B(pl, &ok);
  // We don't need to save the result register if we are going to do a tail
  // call.
  if (!is_tail_call) {
    __ Push(x0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ Pop(x0);
  }
  EmitProfilingCounterReset();
  __ Bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");

  if (return_label_.is_bound()) {
    __ B(&return_label_);

  } else {
    __ Bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in x0.
      __ Push(result_register());
      __ CallRuntime(Runtime::kTraceExit);
      DCHECK(x0.Is(result_register()));
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    const Register& current_sp = __ StackPointer();
    // Nothing ensures 16-byte alignment here.
    DCHECK(!current_sp.Is(csp));
    __ Mov(current_sp, fp);
    __ Ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
    // Drop the arguments and receiver and return.
    // TODO(all): This implementation is overkill as it supports 2**31+1
    // arguments, consider how to improve it without creating a security
    // hole.
    __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
    __ Add(current_sp, current_sp, ip0);
    __ Ret();
    int32_t arg_count = info_->scope()->num_parameters() + 1;
    __ dc64(kXRegSize * arg_count);
  }
}
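
// Illustrative walkthrough (added; not part of the original source): the tail
// of the return sequence above emits, roughly:
//
//   ldr ip0, [pc, #12]               ; load the literal placed after Ret
//   add current_sp, current_sp, ip0  ; pop arguments and receiver
//   ret
//   .quad kXRegSize * arg_count      ; emitted by dc64
//
// The pc-relative load reaches forward over the Add and Ret (3 instructions,
// hence 3 * kInstructionSize) to fetch the byte count of the arguments plus
// receiver, so the count lives as patchable data in the instruction stream.
// The mnemonic spellings are an approximation of what is actually emitted.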

void FullCodeGenerator::RestoreContext() {
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  // Root values have no side effects.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ Mov(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ Poke(reg, 0);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ Bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ Bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(x10, Heap::kFalseValueRootIndex);
  __ Bind(&done);
  codegen()->PushOperand(x10);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(x10, value_root_index);
  codegen()->PushOperand(x10);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) {
      __ B(true_label_);
    }
  } else {
    if (false_label_ != fall_through_) {
      __ B(false_label_);
    }
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(eq, if_true, if_false, fall_through);
}


// If (cond), branch to if_true.
// If (!cond), branch to if_false.
// fall_through is used as an optimization in cases where only one branch
// instruction is necessary.
void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ B(cond, if_true);
  } else if (if_true == fall_through) {
    DCHECK(if_false != fall_through);
    __ B(NegateCondition(cond), if_false);
  } else {
    __ B(cond, if_true);
    __ B(if_false);
  }
}
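
// Illustrative note (added; not part of the original source): Split saves a
// branch whenever the code that follows is itself one of the targets. A
// caller that lays out the false case immediately after the comparison can
// pass it as fall_through:
//
//   Label if_true, if_false;
//   __ Cmp(x1, x0);
//   Split(eq, &if_true, &if_false, &if_false);  // emits only `b.eq if_true`
//   __ Bind(&if_false);  // reached by falling through
//   // ... false case ...
//   __ Bind(&if_true);
//
// The registers and labels here are hypothetical; only the single-branch
// shape is the point.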


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kXRegSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
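
// Worked example (added; not part of the original source): a parameter's slot
// is rebased to sit above fp among the caller-pushed arguments via the
// (num_parameters + 1) * kPointerSize term (the +1 covers the receiver),
// while a local's slot sits below fp starting at kLocal0Offset. In both cases
// the negated index means higher variable indexes map to lower addresses;
// e.g., assuming kLocal0Offset == -24, the local with index 1 resolves to
// MemOperand(fp, -8 - 24), i.e. fp - 32. The concrete constant is an
// assumption for illustration only.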


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ Ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!AreAliased(src, scratch0, scratch1));
  MemOperand location = VarOperand(var, scratch0);
  __ Str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    // scratch0 contains the correct context.
    __ RecordWriteContextSlot(scratch0, static_cast<int>(location.offset()),
                              src, scratch1, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  // TODO(all): Investigate to see if there is something to work on here.
  Label skip;
  if (should_normalize) {
    __ B(&skip);
  }
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ CompareRoot(x0, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, NULL);
    __ Bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST;

  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Mov(x2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
        __ Push(x2, x0);
      } else {
        // Pushing 0 (xzr) indicates no initial value.
        __ Push(x2, xzr);
      }
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ Function Declaration");
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ Function Declaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                x2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Function Declaration");
      __ Mov(x2, Operand(variable->name()));
      PushOperand(x2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Mov(x11, Operand(pairs));
  Register flags = xzr;
  if (Smi::FromInt(DeclareGlobalsFlags())) {
    flags = x10;
    __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
  }
  __ Push(x11, flags);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ Bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ Peek(x1, 0);  // Switch value.

    JumpPatchSite patch_site(masm_);
    if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
      Label slow_case;
      patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
      __ Cmp(x1, x0);
      __ B(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ B(clause->body_target());
      __ Bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ B(&skip);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
    __ Drop(1);
    __ B(clause->body_target());
    __ Bind(&skip);

    __ Cbnz(x0, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ B(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ Bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ B(nested_statement.break_label());
  } else {
    __ B(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ Bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ Bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // TODO(all): This visitor probably needs better comments and a revisit.

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(x0, &convert);
  __ JumpIfObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE, &done_convert, ge);
  __ JumpIfRoot(x0, Heap::kNullValueRootIndex, &exit);
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
  __ Bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ Bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ Push(x0);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(x0, x15, x10, x11, x12, x13, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ B(&use_cache);

  // Get the set of properties to enumerate.
  __ Bind(&call_runtime);
  __ Push(x0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array, no_descriptors;
  __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);

  // We got a map in register x0. Get the enumeration cache from it.
  __ Bind(&use_cache);

  __ EnumLengthUntagged(x1, x0);
  __ Cbz(x1, &no_descriptors);

  __ LoadInstanceDescriptors(x0, x2);
  __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
  __ Ldr(x2,
         FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ SmiTag(x1);
  // Map, enumeration cache, enum cache length, zero (both last as smis).
  __ Push(x0, x2, x1, xzr);
  __ B(&loop);

  __ Bind(&no_descriptors);
  __ Drop(1);
  __ B(&exit);

  // We got a fixed array in register x0. Iterate through that.
  __ Bind(&fixed_array);

  __ Mov(x1, Smi::FromInt(1));  // Smi(1) indicates slow check.
  __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
  __ Push(x1, x0, x2);  // Smi and array, fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ Push(xzr);  // Initial index.

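  // Layout note (added; not part of the original source): both paths above
  // leave the same five operand-stack slots, matching the
  // OperandStackDepthIncrement(5) earlier. From the top of the stack down:
  //   [0] the current index, as a smi (initially 0)
  //   [1] the array length, as a smi
  //   [2] the enum cache array, or the fixed array of names
  //   [3] the receiver's map, or Smi(1) on the slow (fixed-array) path
  //   [4] the enumerable object itself
  // The Peek offsets below (2, 3 and 4 * kXRegSize) read these slots.
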
  // Generate code for doing the condition check.
  __ Bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to x0, load the length to x1.
  __ PeekPair(x0, x1, 0);
  __ Cmp(x0, x1);  // Compare to the array length.
  __ B(hs, loop_statement.break_label());

  // Get the current entry of the array into register x3.
  __ Peek(x10, 2 * kXRegSize);
  __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
  __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register x2.
  __ Peek(x2, 3 * kXRegSize);

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ Peek(x1, 4 * kXRegSize);
  __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
  __ Cmp(x11, x2);
  __ B(eq, &update_each);

  // We need to filter the key, record the slow path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(x0);
  __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ Str(x10, FieldMemOperand(x0, FixedArray::OffsetOfElementAt(vector_index)));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(x1, x3);
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ Mov(x3, x0);
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex,
                loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register x3.
  __ Bind(&update_each);
  __ Mov(result_register(), x3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ Bind(loop_statement.continue_label());
  // TODO(all): We could use a callee saved register to avoid popping.
  __ Pop(x0);
  __ Add(x0, x0, Smi::FromInt(1));
  __ Push(x0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ B(&loop);

  // Remove the pointers stored on the stack.
  __ Bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ Bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Peek(StoreDescriptor::ReceiverRegister(), 0);
  __ Mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), x0);
  __ Mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = x10;
  Register temp = x11;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    __ Mov(next, current);

    __ Bind(&loop);
    // Terminate at native context.
    __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
    // Check that extension is "the hole".
    __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ B(&loop);
    __ Bind(&fast);
  }

  // All extension objects were empty and it is safe to use the normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = x10;
  Register temp = x11;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ B(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST) {
      __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
      __ Mov(x0, Operand(var->name()));
      __ Push(x0);
      __ CallRuntime(Runtime::kThrowReferenceError);
    }
    __ B(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
  __ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
  __ Mov(LoadDescriptor::SlotRegister(),
         SmiFromSlot(proxy->VariableFeedbackSlot()));
  CallLoadIC(typeof_mode);
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(x0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        GetVar(x0, var);
        Label done;
        __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ Mov(x0, Operand(var->name()));
          __ Push(x0);
          __ CallRuntime(Runtime::kThrowReferenceError);
          __ Bind(&done);
        }
        context()->Plug(x0);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ Bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ Push(var->name());
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ Bind(&done);
      context()->Plug(x0);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(expr->pattern()));
  __ Mov(x0, Smi::FromInt(expr->flags()));
  FastCloneRegExpStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    __ LoadRoot(x10, Heap::kNullValueRootIndex);
    PushOperand(x10);
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}


1397void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1398 Comment cmnt(masm_, "[ ObjectLiteral");
1399
1400 Handle<FixedArray> constant_properties = expr->constant_properties();
1401 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1402 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1403 __ Mov(x1, Operand(constant_properties));
1404 int flags = expr->ComputeFlags();
1405 __ Mov(x0, Smi::FromInt(flags));
1406 if (MustCreateObjectLiteralWithRuntime(expr)) {
1407 __ Push(x3, x2, x1, x0);
1408 __ CallRuntime(Runtime::kCreateObjectLiteral);
1409 } else {
1410 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1411 __ CallStub(&stub);
Ben Murdochc5610432016-08-08 18:44:38 +01001412 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001413 }
Ben Murdochc5610432016-08-08 18:44:38 +01001414 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001415
1416 // If result_saved is true the result is on top of the stack. If
1417 // result_saved is false the result is in x0.
1418 bool result_saved = false;
1419
1420 AccessorTable accessor_table(zone());
1421 int property_index = 0;
1422 for (; property_index < expr->properties()->length(); property_index++) {
1423 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1424 if (property->is_computed_name()) break;
1425 if (property->IsCompileTimeValue()) continue;
1426
1427 Literal* key = property->key()->AsLiteral();
1428 Expression* value = property->value();
1429 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001430 PushOperand(x0); // Save result on stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001431 result_saved = true;
1432 }
1433 switch (property->kind()) {
1434 case ObjectLiteral::Property::CONSTANT:
1435 UNREACHABLE();
1436 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1437 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1438 // Fall through.
1439 case ObjectLiteral::Property::COMPUTED:
1440 // It is safe to use [[Put]] here because the boilerplate already
1441 // contains computed properties with an uninitialized value.
1442 if (key->value()->IsInternalizedString()) {
1443 if (property->emit_store()) {
1444 VisitForAccumulatorValue(value);
1445 DCHECK(StoreDescriptor::ValueRegister().is(x0));
1446 __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1447 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1448 EmitLoadStoreICSlot(property->GetSlot(0));
1449 CallStoreIC();
Ben Murdochc5610432016-08-08 18:44:38 +01001450 PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001451
1452 if (NeedsHomeObject(value)) {
1453 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1454 }
1455 } else {
1456 VisitForEffect(value);
1457 }
1458 break;
1459 }
1460 __ Peek(x0, 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001461 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001462 VisitForStackValue(key);
1463 VisitForStackValue(value);
1464 if (property->emit_store()) {
1465 if (NeedsHomeObject(value)) {
1466 EmitSetHomeObject(value, 2, property->GetSlot());
1467 }
1468 __ Mov(x0, Smi::FromInt(SLOPPY)); // Language mode
Ben Murdoch097c5b22016-05-18 11:27:45 +01001469 PushOperand(x0);
1470 CallRuntimeWithOperands(Runtime::kSetProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001471 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001472 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001473 }
1474 break;
1475 case ObjectLiteral::Property::PROTOTYPE:
1476 DCHECK(property->emit_store());
1477 // Duplicate receiver on stack.
1478 __ Peek(x0, 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001479 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001480 VisitForStackValue(value);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001481 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001482 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
Ben Murdochc5610432016-08-08 18:44:38 +01001483 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001484 break;
1485 case ObjectLiteral::Property::GETTER:
1486 if (property->emit_store()) {
1487 accessor_table.lookup(key)->second->getter = property;
1488 }
1489 break;
1490 case ObjectLiteral::Property::SETTER:
1491 if (property->emit_store()) {
1492 accessor_table.lookup(key)->second->setter = property;
1493 }
1494 break;
1495 }
1496 }
1497
1498 // Emit code to define accessors, using only a single call to the runtime for
1499 // each pair of corresponding getters and setters.
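  // E.g. (illustrative only): for { get x() {...}, set x(v) {...} } both
  // accessors land in one accessor_table entry keyed by "x", so a single
  // Runtime::kDefineAccessorPropertyUnchecked call installs the pair.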
1500 for (AccessorTable::Iterator it = accessor_table.begin();
1501 it != accessor_table.end();
1502 ++it) {
1503 __ Peek(x10, 0); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001504 PushOperand(x10);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001505 VisitForStackValue(it->first);
1506 EmitAccessor(it->second->getter);
1507 EmitAccessor(it->second->setter);
1508 __ Mov(x10, Smi::FromInt(NONE));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001509 PushOperand(x10);
1510 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001511 }
1512
1513 // Object literals have two parts. The "static" part on the left contains no
1514 // computed property names, and so we can compute its map ahead of time; see
1515 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1516 // starts with the first computed property name, and continues with all
1517 // properties to its right. All the code above initializes the static
1518 // component of the object literal and arranges for the map of the result to
1519 // reflect the static order in which the keys appear. The dynamic
1520 // properties are compiled into a series of "SetOwnProperty" runtime
1521 // calls, which preserves insertion order.
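  // E.g. (illustrative only): in { a: 1, [f()]: 2, b: 3 } the property "a"
  // belongs to the static part and is accounted for in the boilerplate map,
  // while [f()] and everything after it (including "b") is defined by the
  // runtime calls emitted below.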
1522 for (; property_index < expr->properties()->length(); property_index++) {
1523 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1524
1525 Expression* value = property->value();
1526 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001527 PushOperand(x0); // Save result on stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001528 result_saved = true;
1529 }
1530
1531 __ Peek(x10, 0); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001532 PushOperand(x10);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001533
1534 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1535 DCHECK(!property->is_computed_name());
1536 VisitForStackValue(value);
1537 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001538 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001539 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
Ben Murdochc5610432016-08-08 18:44:38 +01001540 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001541 } else {
1542 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1543 VisitForStackValue(value);
1544 if (NeedsHomeObject(value)) {
1545 EmitSetHomeObject(value, 2, property->GetSlot());
1546 }
1547
1548 switch (property->kind()) {
1549 case ObjectLiteral::Property::CONSTANT:
1550 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1551 case ObjectLiteral::Property::COMPUTED:
1552 if (property->emit_store()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001553 PushOperand(Smi::FromInt(NONE));
1554 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1555 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001556 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001557 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001558 }
1559 break;
1560
1561 case ObjectLiteral::Property::PROTOTYPE:
1562 UNREACHABLE();
1563 break;
1564
1565 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001566 PushOperand(Smi::FromInt(NONE));
1567 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001568 break;
1569
1570 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001571 PushOperand(Smi::FromInt(NONE));
1572 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001573 break;
1574 }
1575 }
1576 }
1577
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001578 if (result_saved) {
1579 context()->PlugTOS();
1580 } else {
1581 context()->Plug(x0);
1582 }
1583}
1584
1585
1586void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1587 Comment cmnt(masm_, "[ ArrayLiteral");
1588
1589 Handle<FixedArray> constant_elements = expr->constant_elements();
1590 bool has_fast_elements =
1591 IsFastObjectElementsKind(expr->constant_elements_kind());
1592
1593 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1594 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1595 // If the only consumer of the allocation site is ElementsKind transitioning,
1596 // tracking can be turned off when there is nothing left to transition to.
1597 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1598 }
1599
1600 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1601 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1602 __ Mov(x1, Operand(constant_elements));
1603 if (MustCreateArrayLiteralWithRuntime(expr)) {
1604 __ Mov(x0, Smi::FromInt(expr->ComputeFlags()));
1605 __ Push(x3, x2, x1, x0);
1606 __ CallRuntime(Runtime::kCreateArrayLiteral);
1607 } else {
1608 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1609 __ CallStub(&stub);
1610 }
Ben Murdochc5610432016-08-08 18:44:38 +01001611 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001612
1613 bool result_saved = false; // Is the result saved to the stack?
1614 ZoneList<Expression*>* subexprs = expr->values();
1615 int length = subexprs->length();
1616
1617 // Emit code to evaluate all the non-constant subexpressions and to store
1618 // them into the newly cloned array.
1619 int array_index = 0;
1620 for (; array_index < length; array_index++) {
1621 Expression* subexpr = subexprs->at(array_index);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001622 DCHECK(!subexpr->IsSpread());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001623
1624 // If the subexpression is a literal or a simple materialized literal it
1625 // is already set in the cloned array.
1626 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1627
1628 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001629 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001630 result_saved = true;
1631 }
1632 VisitForAccumulatorValue(subexpr);
1633
1634 __ Mov(StoreDescriptor::NameRegister(), Smi::FromInt(array_index));
1635 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1636 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1637 Handle<Code> ic =
1638 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1639 CallIC(ic);
1640
Ben Murdochc5610432016-08-08 18:44:38 +01001641 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1642 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001643 }
1644
1645 // If the array literal contains spread expressions, it has two parts. The
1646 // first part is the "static" array, which has a literal index and is handled
1647 // above. The second part starts at the first spread expression (inclusive);
1648 // its elements get appended to the array one by one. Note that the number
1649 // of elements an iterable produces is unknown ahead of time.
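  // E.g. (illustrative only): for [1, 2, ...xs, 3] the elements 1 and 2 are
  // stored through the keyed store IC above, while ...xs and 3 are appended
  // one at a time via Runtime::kAppendElement below.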
1650 if (array_index < length && result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001651 PopOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001652 result_saved = false;
1653 }
1654 for (; array_index < length; array_index++) {
1655 Expression* subexpr = subexprs->at(array_index);
1656
Ben Murdoch097c5b22016-05-18 11:27:45 +01001657 PushOperand(x0);
1658 DCHECK(!subexpr->IsSpread());
1659 VisitForStackValue(subexpr);
1660 CallRuntimeWithOperands(Runtime::kAppendElement);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001661
Ben Murdochc5610432016-08-08 18:44:38 +01001662 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1663 BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001664 }
1665
1666 if (result_saved) {
1667 context()->PlugTOS();
1668 } else {
1669 context()->Plug(x0);
1670 }
1671}
1672
1673
1674void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1675 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1676
1677 Comment cmnt(masm_, "[ Assignment");
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001678
1679 Property* property = expr->target()->AsProperty();
1680 LhsKind assign_type = Property::GetAssignType(property);
1681
1682 // Evaluate LHS expression.
1683 switch (assign_type) {
1684 case VARIABLE:
1685 // Nothing to do here.
1686 break;
1687 case NAMED_PROPERTY:
1688 if (expr->is_compound()) {
1689 // We need the receiver both on the stack and in the register.
1690 VisitForStackValue(property->obj());
1691 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
1692 } else {
1693 VisitForStackValue(property->obj());
1694 }
1695 break;
1696 case NAMED_SUPER_PROPERTY:
1697 VisitForStackValue(
1698 property->obj()->AsSuperPropertyReference()->this_var());
1699 VisitForAccumulatorValue(
1700 property->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001701 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001702 if (expr->is_compound()) {
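        // Duplicate [this, home_object] so the compound load can consume one
        // copy while the store below still finds the other.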
1703 const Register scratch = x10;
1704 __ Peek(scratch, kPointerSize);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001705 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001706 }
1707 break;
1708 case KEYED_SUPER_PROPERTY:
1709 VisitForStackValue(
1710 property->obj()->AsSuperPropertyReference()->this_var());
1711 VisitForStackValue(
1712 property->obj()->AsSuperPropertyReference()->home_object());
1713 VisitForAccumulatorValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001714 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001715 if (expr->is_compound()) {
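        // Duplicate [this, home_object, key] so the compound load can pop one
        // triple while the store below still finds the original.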
1716 const Register scratch1 = x10;
1717 const Register scratch2 = x11;
1718 __ Peek(scratch1, 2 * kPointerSize);
1719 __ Peek(scratch2, kPointerSize);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001720 PushOperands(scratch1, scratch2, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001721 }
1722 break;
1723 case KEYED_PROPERTY:
1724 if (expr->is_compound()) {
1725 VisitForStackValue(property->obj());
1726 VisitForStackValue(property->key());
1727 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
1728 __ Peek(LoadDescriptor::NameRegister(), 0);
1729 } else {
1730 VisitForStackValue(property->obj());
1731 VisitForStackValue(property->key());
1732 }
1733 break;
1734 }
1735
1736 // For compound assignments we need another deoptimization point after the
1737 // variable/property load.
1738 if (expr->is_compound()) {
1739 { AccumulatorValueContext context(this);
1740 switch (assign_type) {
1741 case VARIABLE:
1742 EmitVariableLoad(expr->target()->AsVariableProxy());
Ben Murdochc5610432016-08-08 18:44:38 +01001743 PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001744 break;
1745 case NAMED_PROPERTY:
1746 EmitNamedPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001747 PrepareForBailoutForId(property->LoadId(),
1748 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001749 break;
1750 case NAMED_SUPER_PROPERTY:
1751 EmitNamedSuperPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001752 PrepareForBailoutForId(property->LoadId(),
1753 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001754 break;
1755 case KEYED_SUPER_PROPERTY:
1756 EmitKeyedSuperPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001757 PrepareForBailoutForId(property->LoadId(),
1758 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001759 break;
1760 case KEYED_PROPERTY:
1761 EmitKeyedPropertyLoad(property);
Ben Murdochc5610432016-08-08 18:44:38 +01001762 PrepareForBailoutForId(property->LoadId(),
1763 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001764 break;
1765 }
1766 }
1767
1768 Token::Value op = expr->binary_op();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001769 PushOperand(x0); // Left operand goes on the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001770 VisitForAccumulatorValue(expr->value());
1771
1772 AccumulatorValueContext context(this);
1773 if (ShouldInlineSmiCase(op)) {
1774 EmitInlineSmiBinaryOp(expr->binary_operation(),
1775 op,
1776 expr->target(),
1777 expr->value());
1778 } else {
1779 EmitBinaryOp(expr->binary_operation(), op);
1780 }
1781
1782 // Deoptimization point in case the binary operation may have side effects.
Ben Murdochc5610432016-08-08 18:44:38 +01001783 PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001784 } else {
1785 VisitForAccumulatorValue(expr->value());
1786 }
1787
1788 SetExpressionPosition(expr);
1789
1790 // Store the value.
1791 switch (assign_type) {
1792 case VARIABLE:
1793 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1794 expr->op(), expr->AssignmentSlot());
Ben Murdochc5610432016-08-08 18:44:38 +01001795 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001796 context()->Plug(x0);
1797 break;
1798 case NAMED_PROPERTY:
1799 EmitNamedPropertyAssignment(expr);
1800 break;
1801 case NAMED_SUPER_PROPERTY:
1802 EmitNamedSuperPropertyStore(property);
1803 context()->Plug(x0);
1804 break;
1805 case KEYED_SUPER_PROPERTY:
1806 EmitKeyedSuperPropertyStore(property);
1807 context()->Plug(x0);
1808 break;
1809 case KEYED_PROPERTY:
1810 EmitKeyedPropertyAssignment(expr);
1811 break;
1812 }
1813}
1814
1815
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001816void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1817 Token::Value op,
1818 Expression* left_expr,
1819 Expression* right_expr) {
1820 Label done, both_smis, stub_call;
1821
1822 // Get the arguments.
1823 Register left = x1;
1824 Register right = x0;
1825 Register result = x0;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001826 PopOperand(left);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001827
1828 // Perform combined smi check on both operands.
1829 __ Orr(x10, left, right);
1830 JumpPatchSite patch_site(masm_);
1831 patch_site.EmitJumpIfSmi(x10, &both_smis);
1832
1833 __ Bind(&stub_call);
1834
Ben Murdoch097c5b22016-05-18 11:27:45 +01001835 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001836 {
1837 Assembler::BlockPoolsScope scope(masm_);
1838 CallIC(code, expr->BinaryOperationFeedbackId());
1839 patch_site.EmitPatchInfo();
1840 }
1841 __ B(&done);
1842
1843 __ Bind(&both_smis);
1844 // Smi case. This code works in the same way as the smi-smi case in the
1845 // old type-recording binary operation stub
1846 // (BinaryOpStub::GenerateSmiSmiOperation); that stub and its comments have
1847 // since been removed, so the relevant details are noted inline below.
1848 //
1849 // The set of operations that needs to be supported here is controlled by
1850 // FullCodeGenerator::ShouldInlineSmiCase().
1851 switch (op) {
1852 case Token::SAR:
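      // JS shift counts are taken modulo 32, so for all three shifts Ubfx
      // extracts just the low five bits of the untagged shift amount, which
      // sit at bit kSmiShift of the tagged right operand.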
1853 __ Ubfx(right, right, kSmiShift, 5);
1854 __ Asr(result, left, right);
1855 __ Bic(result, result, kSmiShiftMask);
1856 break;
1857 case Token::SHL:
1858 __ Ubfx(right, right, kSmiShift, 5);
1859 __ Lsl(result, left, right);
1860 break;
1861 case Token::SHR:
1862 // If `left >>> right` >= 0x80000000, the result is not representable in a
1863 // signed 32-bit smi.
1864 __ Ubfx(right, right, kSmiShift, 5);
1865 __ Lsr(x10, left, right);
1866 __ Tbnz(x10, kXSignBit, &stub_call);
1867 __ Bic(result, x10, kSmiShiftMask);
1868 break;
1869 case Token::ADD:
1870 __ Adds(x10, left, right);
1871 __ B(vs, &stub_call);
1872 __ Mov(result, x10);
1873 break;
1874 case Token::SUB:
1875 __ Subs(x10, left, right);
1876 __ B(vs, &stub_call);
1877 __ Mov(result, x10);
1878 break;
1879 case Token::MUL: {
1880 Label not_minus_zero, done;
1881 STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
1882 STATIC_ASSERT(kSmiTag == 0);
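      // Both operands are tagged smis, i.e. the 32-bit payload sits in the
      // upper half of the register, so the 128-bit product is (a * b) << 64
      // and Smulh yields the untagged 64-bit product directly.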
1883 __ Smulh(x10, left, right);
1884 __ Cbnz(x10, &not_minus_zero);
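      // The product is zero. If the operand signs differ the JS result is -0,
      // which is not representable as a smi, so defer to the stub.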
1885 __ Eor(x11, left, right);
1886 __ Tbnz(x11, kXSignBit, &stub_call);
1887 __ Mov(result, x10);
1888 __ B(&done);
1889 __ Bind(&not_minus_zero);
1890 __ Cls(x11, x10);
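      // Cls counts the redundant sign-extension bits below the sign bit; the
      // product only fits a 32-bit smi payload if at least kSmiShift (32)
      // bits are redundant, otherwise fall back to the stub.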
1891 __ Cmp(x11, kXRegSizeInBits - kSmiShift);
1892 __ B(lt, &stub_call);
1893 __ SmiTag(result, x10);
1894 __ Bind(&done);
1895 break;
1896 }
1897 case Token::BIT_OR:
1898 __ Orr(result, left, right);
1899 break;
1900 case Token::BIT_AND:
1901 __ And(result, left, right);
1902 break;
1903 case Token::BIT_XOR:
1904 __ Eor(result, left, right);
1905 break;
1906 default:
1907 UNREACHABLE();
1908 }
1909
1910 __ Bind(&done);
1911 context()->Plug(x0);
1912}
1913
1914
1915void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001916 PopOperand(x1);
1917 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001918 JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
1919 {
1920 Assembler::BlockPoolsScope scope(masm_);
1921 CallIC(code, expr->BinaryOperationFeedbackId());
1922 patch_site.EmitPatchInfo();
1923 }
1924 context()->Plug(x0);
1925}
1926
1927
1928void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001929 for (int i = 0; i < lit->properties()->length(); i++) {
1930 ObjectLiteral::Property* property = lit->properties()->at(i);
1931 Expression* value = property->value();
1932
Ben Murdoch097c5b22016-05-18 11:27:45 +01001933 Register scratch = x1;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001934 if (property->is_static()) {
1935 __ Peek(scratch, kPointerSize); // constructor
1936 } else {
1937 __ Peek(scratch, 0); // prototype
1938 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001939 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001940 EmitPropertyKey(property, lit->GetIdForProperty(i));
1941
1942 // The static "prototype" property is read-only. The non-computed property
1943 // name case is handled in the parser. Since this is the only case where we
1944 // need to check for an own read-only property, we special-case it here so
1945 // that the check is not repeated for every property.
1946 if (property->is_static() && property->is_computed_name()) {
1947 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
1948 __ Push(x0);
1949 }
1950
1951 VisitForStackValue(value);
1952 if (NeedsHomeObject(value)) {
1953 EmitSetHomeObject(value, 2, property->GetSlot());
1954 }
1955
1956 switch (property->kind()) {
1957 case ObjectLiteral::Property::CONSTANT:
1958 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1959 case ObjectLiteral::Property::PROTOTYPE:
1960 UNREACHABLE();
1961 case ObjectLiteral::Property::COMPUTED:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001962 PushOperand(Smi::FromInt(DONT_ENUM));
1963 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1964 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001965 break;
1966
1967 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001968 PushOperand(Smi::FromInt(DONT_ENUM));
1969 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001970 break;
1971
1972 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001973 PushOperand(Smi::FromInt(DONT_ENUM));
1974 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001975 break;
1976
1977 default:
1978 UNREACHABLE();
1979 }
1980 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001981}
1982
1983
1984void FullCodeGenerator::EmitAssignment(Expression* expr,
1985 FeedbackVectorSlot slot) {
1986 DCHECK(expr->IsValidReferenceExpressionOrThis());
1987
1988 Property* prop = expr->AsProperty();
1989 LhsKind assign_type = Property::GetAssignType(prop);
1990
1991 switch (assign_type) {
1992 case VARIABLE: {
1993 Variable* var = expr->AsVariableProxy()->var();
1994 EffectContext context(this);
1995 EmitVariableAssignment(var, Token::ASSIGN, slot);
1996 break;
1997 }
1998 case NAMED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001999 PushOperand(x0); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002000 VisitForAccumulatorValue(prop->obj());
2001 // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
2002 // this copy.
2003 __ Mov(StoreDescriptor::ReceiverRegister(), x0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002004 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002005 __ Mov(StoreDescriptor::NameRegister(),
2006 Operand(prop->key()->AsLiteral()->value()));
2007 EmitLoadStoreICSlot(slot);
2008 CallStoreIC();
2009 break;
2010 }
2011 case NAMED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002012 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002013 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2014 VisitForAccumulatorValue(
2015 prop->obj()->AsSuperPropertyReference()->home_object());
2016 // stack: value, this; x0: home_object
2017 Register scratch = x10;
2018 Register scratch2 = x11;
2019 __ mov(scratch, result_register()); // home_object
2020 __ Peek(x0, kPointerSize); // value
2021 __ Peek(scratch2, 0); // this
2022 __ Poke(scratch2, kPointerSize); // this
2023 __ Poke(scratch, 0); // home_object
2024 // stack: this, home_object; x0: value
2025 EmitNamedSuperPropertyStore(prop);
2026 break;
2027 }
2028 case KEYED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002029 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002030 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2031 VisitForStackValue(
2032 prop->obj()->AsSuperPropertyReference()->home_object());
2033 VisitForAccumulatorValue(prop->key());
2034 Register scratch = x10;
2035 Register scratch2 = x11;
2036 __ Peek(scratch2, 2 * kPointerSize); // value
2037 // stack: value, this, home_object; x0: key, x11: value
2038 __ Peek(scratch, kPointerSize); // this
2039 __ Poke(scratch, 2 * kPointerSize);
2040 __ Peek(scratch, 0); // home_object
2041 __ Poke(scratch, kPointerSize);
2042 __ Poke(x0, 0);
2043 __ Move(x0, scratch2);
2044 // stack: this, home_object, key; x0: value.
2045 EmitKeyedSuperPropertyStore(prop);
2046 break;
2047 }
2048 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002049 PushOperand(x0); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002050 VisitForStackValue(prop->obj());
2051 VisitForAccumulatorValue(prop->key());
2052 __ Mov(StoreDescriptor::NameRegister(), x0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002053 PopOperands(StoreDescriptor::ReceiverRegister(),
2054 StoreDescriptor::ValueRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002055 EmitLoadStoreICSlot(slot);
2056 Handle<Code> ic =
2057 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2058 CallIC(ic);
2059 break;
2060 }
2061 }
2062 context()->Plug(x0);
2063}
2064
2065
2066void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2067 Variable* var, MemOperand location) {
2068 __ Str(result_register(), location);
2069 if (var->IsContextSlot()) {
2070 // RecordWrite may destroy all its register arguments.
2071 __ Mov(x10, result_register());
2072 int offset = Context::SlotOffset(var->index());
2073 __ RecordWriteContextSlot(
2074 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
2075 }
2076}
2077
2078
2079void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2080 FeedbackVectorSlot slot) {
2081 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2082 if (var->IsUnallocated()) {
2083 // Global var, const, or let.
2084 __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2085 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2086 EmitLoadStoreICSlot(slot);
2087 CallStoreIC();
2088
2089 } else if (var->mode() == LET && op != Token::INIT) {
2090 // Non-initializing assignment to a let variable: TDZ-check, then store.
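    // E.g. (illustrative only): in `{ x = 1; let x; }` the store to x happens
    // while the binding still holds the hole, so a ReferenceError is thrown.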
2091 DCHECK(!var->IsLookupSlot());
2092 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2093 Label assign;
2094 MemOperand location = VarOperand(var, x1);
2095 __ Ldr(x10, location);
2096 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
2097 __ Mov(x10, Operand(var->name()));
2098 __ Push(x10);
2099 __ CallRuntime(Runtime::kThrowReferenceError);
2100 // Perform the assignment.
2101 __ Bind(&assign);
2102 EmitStoreToStackLocalOrContextSlot(var, location);
2103
2104 } else if (var->mode() == CONST && op != Token::INIT) {
2105 // Non-initializing assignment to a const variable always throws.
2106 DCHECK(!var->IsLookupSlot());
2107 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2108 Label const_error;
2109 MemOperand location = VarOperand(var, x1);
2110 __ Ldr(x10, location);
2111 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &const_error);
2112 __ Mov(x10, Operand(var->name()));
2113 __ Push(x10);
2114 __ CallRuntime(Runtime::kThrowReferenceError);
2115 __ Bind(&const_error);
2116 __ CallRuntime(Runtime::kThrowConstAssignError);
2117
2118 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2119 // Initializing assignment to const {this} needs a write barrier.
2120 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2121 Label uninitialized_this;
2122 MemOperand location = VarOperand(var, x1);
2123 __ Ldr(x10, location);
2124 __ JumpIfRoot(x10, Heap::kTheHoleValueRootIndex, &uninitialized_this);
2125 __ Mov(x0, Operand(var->name()));
2126 __ Push(x0);
2127 __ CallRuntime(Runtime::kThrowReferenceError);
2128 __ bind(&uninitialized_this);
2129 EmitStoreToStackLocalOrContextSlot(var, location);
2130
Ben Murdochc5610432016-08-08 18:44:38 +01002131 } else if (!var->is_const_mode() || op == Token::INIT) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002132 if (var->IsLookupSlot()) {
2133 // Assignment to var.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002134 __ Push(var->name());
2135 __ Push(x0);
2136 __ CallRuntime(is_strict(language_mode())
2137 ? Runtime::kStoreLookupSlot_Strict
2138 : Runtime::kStoreLookupSlot_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002139 } else {
2140 // Assignment to var or initializing assignment to let/const in harmony
2141 // mode.
2142 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2143 MemOperand location = VarOperand(var, x1);
2144 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
2145 __ Ldr(x10, location);
2146 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
2147 __ Check(eq, kLetBindingReInitialization);
2148 }
2149 EmitStoreToStackLocalOrContextSlot(var, location);
2150 }
2151
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002152 } else {
2153 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2154 if (is_strict(language_mode())) {
2155 __ CallRuntime(Runtime::kThrowConstAssignError);
2156 }
2157 // Silently ignore store in sloppy mode.
2158 }
2159}
2160
2161
2162void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2163 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2164 // Assignment to a property, using a named store IC.
2165 Property* prop = expr->target()->AsProperty();
2166 DCHECK(prop != NULL);
2167 DCHECK(prop->key()->IsLiteral());
2168
2169 __ Mov(StoreDescriptor::NameRegister(),
2170 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002171 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002172 EmitLoadStoreICSlot(expr->AssignmentSlot());
2173 CallStoreIC();
2174
Ben Murdochc5610432016-08-08 18:44:38 +01002175 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002176 context()->Plug(x0);
2177}
2178
2179
2180void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2181 // Assignment to named property of super.
2182 // x0 : value
2183 // stack : receiver ('this'), home_object
2184 DCHECK(prop != NULL);
2185 Literal* key = prop->key()->AsLiteral();
2186 DCHECK(key != NULL);
2187
Ben Murdoch097c5b22016-05-18 11:27:45 +01002188 PushOperand(key->value());
2189 PushOperand(x0);
2190 CallRuntimeWithOperands(is_strict(language_mode())
2191 ? Runtime::kStoreToSuper_Strict
2192 : Runtime::kStoreToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002193}
2194
2195
2196void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2197 // Assignment to keyed property of super.
2198 // x0 : value
2199 // stack : receiver ('this'), home_object, key
2200 DCHECK(prop != NULL);
2201
Ben Murdoch097c5b22016-05-18 11:27:45 +01002202 PushOperand(x0);
2203 CallRuntimeWithOperands(is_strict(language_mode())
2204 ? Runtime::kStoreKeyedToSuper_Strict
2205 : Runtime::kStoreKeyedToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002206}
2207
2208
2209void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2210 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2211 // Assignment to a property, using a keyed store IC.
2212
2213 // TODO(all): Could we pass this in registers rather than on the stack?
Ben Murdoch097c5b22016-05-18 11:27:45 +01002214 PopOperands(StoreDescriptor::NameRegister(),
2215 StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002216 DCHECK(StoreDescriptor::ValueRegister().is(x0));
2217
2218 Handle<Code> ic =
2219 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2220 EmitLoadStoreICSlot(expr->AssignmentSlot());
2221 CallIC(ic);
2222
Ben Murdochc5610432016-08-08 18:44:38 +01002223 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002224 context()->Plug(x0);
2225}
2226
2227
2228void FullCodeGenerator::CallIC(Handle<Code> code,
2229 TypeFeedbackId ast_id) {
2230 ic_total_count_++;
2231 // All calls must have a predictable size in full-codegen code to ensure that
2232 // the debugger can patch them correctly.
2233 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2234}
2235
2236
2237// Code common for calls using the IC.
2238void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2239 ASM_LOCATION("FullCodeGenerator::EmitCallWithLoadIC");
2240 Expression* callee = expr->expression();
2241
2242 // Get the target function.
2243 ConvertReceiverMode convert_mode;
2244 if (callee->IsVariableProxy()) {
2245 { StackValueContext context(this);
2246 EmitVariableLoad(callee->AsVariableProxy());
Ben Murdochc5610432016-08-08 18:44:38 +01002247 PrepareForBailout(callee, BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002248 }
2249 // Push undefined as receiver. This is patched in the method prologue if it
2250 // is a sloppy mode method.
2251 {
2252 UseScratchRegisterScope temps(masm_);
2253 Register temp = temps.AcquireX();
2254 __ LoadRoot(temp, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002255 PushOperand(temp);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002256 }
2257 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2258 } else {
2259 // Load the function from the receiver.
2260 DCHECK(callee->IsProperty());
2261 DCHECK(!callee->AsProperty()->IsSuperAccess());
2262 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2263 EmitNamedPropertyLoad(callee->AsProperty());
Ben Murdochc5610432016-08-08 18:44:38 +01002264 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2265 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002266 // Push the target function under the receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002267 PopOperand(x10);
2268 PushOperands(x0, x10);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002269 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2270 }
2271
2272 EmitCall(expr, convert_mode);
2273}
2274
2275
2276void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2277 ASM_LOCATION("FullCodeGenerator::EmitSuperCallWithLoadIC");
2278 Expression* callee = expr->expression();
2279 DCHECK(callee->IsProperty());
2280 Property* prop = callee->AsProperty();
2281 DCHECK(prop->IsSuperAccess());
2282 SetExpressionPosition(prop);
2283
2284 Literal* key = prop->key()->AsLiteral();
2285 DCHECK(!key->value()->IsSmi());
2286
2287 // Load the function from the receiver.
2288 const Register scratch = x10;
2289 SuperPropertyReference* super_ref =
2290 callee->AsProperty()->obj()->AsSuperPropertyReference();
2291 VisitForStackValue(super_ref->home_object());
2292 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002293 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002294 __ Peek(scratch, kPointerSize);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002295 PushOperands(x0, scratch);
2296 PushOperand(key->value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002297
2298 // Stack here:
2299 // - home_object
2300 // - this (receiver)
2301 // - this (receiver) <-- LoadFromSuper will pop here and below.
2302 // - home_object
Ben Murdoch097c5b22016-05-18 11:27:45 +01002303 // - key
2304 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
Ben Murdochc5610432016-08-08 18:44:38 +01002305 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002306
2307 // Replace home_object with target function.
2308 __ Poke(x0, kPointerSize);
2309
2310 // Stack here:
2311 // - target function
2312 // - this (receiver)
2313 EmitCall(expr);
2314}
2315
2316
2317// Code common for calls using the IC.
2318void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2319 Expression* key) {
2320 ASM_LOCATION("FullCodeGenerator::EmitKeyedCallWithLoadIC");
2321 // Load the key.
2322 VisitForAccumulatorValue(key);
2323
2324 Expression* callee = expr->expression();
2325
2326 // Load the function from the receiver.
2327 DCHECK(callee->IsProperty());
2328 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2329 __ Move(LoadDescriptor::NameRegister(), x0);
2330 EmitKeyedPropertyLoad(callee->AsProperty());
Ben Murdochc5610432016-08-08 18:44:38 +01002331 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2332 BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002333
2334 // Push the target function under the receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002335 PopOperand(x10);
2336 PushOperands(x0, x10);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002337
2338 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2339}
2340
2341
2342void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2343 ASM_LOCATION("FullCodeGenerator::EmitKeyedSuperCallWithLoadIC");
2344 Expression* callee = expr->expression();
2345 DCHECK(callee->IsProperty());
2346 Property* prop = callee->AsProperty();
2347 DCHECK(prop->IsSuperAccess());
2348 SetExpressionPosition(prop);
2349
2350 // Load the function from the receiver.
2351 const Register scratch = x10;
2352 SuperPropertyReference* super_ref =
2353 callee->AsProperty()->obj()->AsSuperPropertyReference();
2354 VisitForStackValue(super_ref->home_object());
2355 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002356 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002357 __ Peek(scratch, kPointerSize);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002358 PushOperands(x0, scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002359 VisitForStackValue(prop->key());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002360
2361 // Stack here:
2362 // - home_object
2363 // - this (receiver)
2364 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2365 // - home_object
2366 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002367 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
Ben Murdochc5610432016-08-08 18:44:38 +01002368 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002369
2370 // Replace home_object with target function.
2371 __ Poke(x0, kPointerSize);
2372
2373 // Stack here:
2374 // - target function
2375 // - this (receiver)
2376 EmitCall(expr);
2377}
2378
2379
2380void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2381 ASM_LOCATION("FullCodeGenerator::EmitCall");
2382 // Load the arguments.
2383 ZoneList<Expression*>* args = expr->arguments();
2384 int arg_count = args->length();
2385 for (int i = 0; i < arg_count; i++) {
2386 VisitForStackValue(args->at(i));
2387 }
2388
Ben Murdochc5610432016-08-08 18:44:38 +01002389 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01002390 SetCallPosition(expr, expr->tail_call_mode());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002391 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2392 if (FLAG_trace) {
2393 __ CallRuntime(Runtime::kTraceTailCall);
2394 }
2395 // Update profiling counters before the tail call since we will
2396 // not return to this function.
2397 EmitProfilingCounterHandlingForReturnSequence(true);
2398 }
2399 Handle<Code> ic =
2400 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2401 .code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002402 __ Mov(x3, SmiFromSlot(expr->CallFeedbackICSlot()));
2403 __ Peek(x1, (arg_count + 1) * kXRegSize);
2404 // Don't assign a type feedback id to the IC, since type feedback is provided
2405 // by the vector above.
2406 CallIC(ic);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002407 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002408
2409 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002410 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002411 context()->DropAndPlug(1, x0);
2412}
2413
Ben Murdochc5610432016-08-08 18:44:38 +01002414void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
2415 int arg_count = expr->arguments()->length();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002416 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
2417 // Prepare to push a copy of the first argument or undefined if it doesn't
2418 // exist.
2419 if (arg_count > 0) {
2420 __ Peek(x9, arg_count * kXRegSize);
2421 } else {
2422 __ LoadRoot(x9, Heap::kUndefinedValueRootIndex);
2423 }
2424
2425 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2426
2427 // Prepare to push the language mode.
2428 __ Mov(x11, Smi::FromInt(language_mode()));
2429 // Prepare to push the start position of the scope the call resides in.
2430 __ Mov(x12, Smi::FromInt(scope()->start_position()));
Ben Murdochc5610432016-08-08 18:44:38 +01002431 // Prepare to push the source position of the eval call.
2432 __ Mov(x13, Smi::FromInt(expr->position()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002433
2434 // Push.
Ben Murdochc5610432016-08-08 18:44:38 +01002435 __ Push(x9, x10, x11, x12, x13);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002436
2437 // Do the runtime call.
2438 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2439}
2440
2441
2442// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2443void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2444 VariableProxy* callee = expr->expression()->AsVariableProxy();
2445 if (callee->var()->IsLookupSlot()) {
2446 Label slow, done;
2447 SetExpressionPosition(callee);
2448 // Generate code for loading from variables potentially shadowed
2449 // by eval-introduced variables.
2450 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2451
2452 __ Bind(&slow);
2453 // Call the runtime to find the function to call (returned in x0)
2454 // and the object holding it (returned in x1).
Ben Murdoch097c5b22016-05-18 11:27:45 +01002455 __ Push(callee->name());
2456 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2457 PushOperands(x0, x1); // Receiver, function.
Ben Murdochc5610432016-08-08 18:44:38 +01002458 PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002459
2460 // If fast case code has been generated, emit code to push the
2461 // function and receiver and have the slow path jump around this
2462 // code.
2463 if (done.is_linked()) {
2464 Label call;
2465 __ B(&call);
2466 __ Bind(&done);
2467 // Push function.
2468 // The receiver is implicitly the global receiver. Indicate this
2469 // by passing undefined to the call function stub.
2470 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2471 __ Push(x0, x1);
2472 __ Bind(&call);
2473 }
2474 } else {
2475 VisitForStackValue(callee);
2476 // refEnv.WithBaseObject()
2477 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002478 PushOperand(x10); // Reserved receiver slot.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002479 }
2480}
2481
2482
2483void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2484 ASM_LOCATION("FullCodeGenerator::EmitPossiblyEvalCall");
Ben Murdochc5610432016-08-08 18:44:38 +01002485 // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002486 // to resolve the function we need to call. Then we call the resolved
2487 // function using the given arguments.
2488 ZoneList<Expression*>* args = expr->arguments();
2489 int arg_count = args->length();
2490
2491 PushCalleeAndWithBaseObject(expr);
2492
2493 // Push the arguments.
2494 for (int i = 0; i < arg_count; i++) {
2495 VisitForStackValue(args->at(i));
2496 }
2497
2498 // Push a copy of the function (found below the arguments) and
2499 // resolve eval.
2500 __ Peek(x10, (arg_count + 1) * kPointerSize);
2501 __ Push(x10);
Ben Murdochc5610432016-08-08 18:44:38 +01002502 EmitResolvePossiblyDirectEval(expr);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002503
2504 // Touch up the stack with the resolved function.
2505 __ Poke(x0, (arg_count + 1) * kPointerSize);
2506
Ben Murdochc5610432016-08-08 18:44:38 +01002507 PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002508
2509 // Record source position for debugger.
2510 SetCallPosition(expr);
2511
2512 // Call the evaluated function.
2513 __ Peek(x1, (arg_count + 1) * kXRegSize);
2514 __ Mov(x0, arg_count);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002515 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2516 expr->tail_call_mode()),
2517 RelocInfo::CODE_TARGET);
2518 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002519 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002520 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002521 context()->DropAndPlug(1, x0);
2522}
2523
2524
2525void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2526 Comment cmnt(masm_, "[ CallNew");
2527 // According to ECMA-262, section 11.2.2, page 44, the function
2528 // expression in new calls must be evaluated before the
2529 // arguments.
2530
2531 // Push the constructor on the stack; it is loaded back into x1 below as the
2532 // construct target. If it is not a function, the construct stub is
2533 // responsible for raising the proper error.
2534 DCHECK(!expr->expression()->IsSuperPropertyReference());
2535 VisitForStackValue(expr->expression());
2536
2537 // Push the arguments ("left-to-right") on the stack.
2538 ZoneList<Expression*>* args = expr->arguments();
2539 int arg_count = args->length();
2540 for (int i = 0; i < arg_count; i++) {
2541 VisitForStackValue(args->at(i));
2542 }
2543
2544 // Call the construct call builtin that handles allocation and
2545 // constructor invocation.
2546 SetConstructCallPosition(expr);
2547
2548 // Load function and argument count into x1 and x0.
2549 __ Mov(x0, arg_count);
2550 __ Peek(x1, arg_count * kXRegSize);
2551
2552 // Record call targets in unoptimized code.
2553 __ EmitLoadTypeFeedbackVector(x2);
2554 __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));
2555
2556 CallConstructStub stub(isolate());
2557 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002558 OperandStackDepthDecrement(arg_count + 1);
Ben Murdochc5610432016-08-08 18:44:38 +01002559 PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
2560 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002561 context()->Plug(x0);
2562}
2563
2564
2565void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2566 ASM_LOCATION("FullCodeGenerator::EmitSuperConstructorCall");
2567 SuperCallReference* super_call_ref =
2568 expr->expression()->AsSuperCallReference();
2569 DCHECK_NOT_NULL(super_call_ref);
2570
2571 // Push the super constructor target on the stack (may be null,
2572 // but the Construct builtin can deal with that properly).
2573 VisitForAccumulatorValue(super_call_ref->this_function_var());
2574 __ AssertFunction(result_register());
2575 __ Ldr(result_register(),
2576 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2577 __ Ldr(result_register(),
2578 FieldMemOperand(result_register(), Map::kPrototypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002579 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002580
2581 // Push the arguments ("left-to-right") on the stack.
2582 ZoneList<Expression*>* args = expr->arguments();
2583 int arg_count = args->length();
2584 for (int i = 0; i < arg_count; i++) {
2585 VisitForStackValue(args->at(i));
2586 }
2587
2588 // Call the construct call builtin that handles allocation and
2589 // constructor invocation.
2590 SetConstructCallPosition(expr);
2591
2592 // Load new target into x3.
2593 VisitForAccumulatorValue(super_call_ref->new_target_var());
2594 __ Mov(x3, result_register());
2595
2596 // Load function and argument count into x1 and x0.
2597 __ Mov(x0, arg_count);
2598 __ Peek(x1, arg_count * kXRegSize);
2599
2600 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002601 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002602
2603 RecordJSReturnSite(expr);
Ben Murdochc5610432016-08-08 18:44:38 +01002604 RestoreContext();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002605 context()->Plug(x0);
2606}
2607
2608
2609void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2610 ZoneList<Expression*>* args = expr->arguments();
2611 DCHECK(args->length() == 1);
2612
2613 VisitForAccumulatorValue(args->at(0));
2614
2615 Label materialize_true, materialize_false;
2616 Label* if_true = NULL;
2617 Label* if_false = NULL;
2618 Label* fall_through = NULL;
2619 context()->PrepareTest(&materialize_true, &materialize_false,
2620 &if_true, &if_false, &fall_through);
2621
2622 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2623 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
2624
2625 context()->Plug(if_true, if_false);
2626}
2627
2628
2629void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2630 ZoneList<Expression*>* args = expr->arguments();
2631 DCHECK(args->length() == 1);
2632
2633 VisitForAccumulatorValue(args->at(0));
2634
2635 Label materialize_true, materialize_false;
2636 Label* if_true = NULL;
2637 Label* if_false = NULL;
2638 Label* fall_through = NULL;
2639 context()->PrepareTest(&materialize_true, &materialize_false,
2640 &if_true, &if_false, &fall_through);
2641
2642 __ JumpIfSmi(x0, if_false);
2643 __ CompareObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE);
2644 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2645 Split(ge, if_true, if_false, fall_through);
2646
2647 context()->Plug(if_true, if_false);
2648}
2649
2650
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002651void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2652 ZoneList<Expression*>* args = expr->arguments();
2653 DCHECK(args->length() == 1);
2654
2655 VisitForAccumulatorValue(args->at(0));
2656
2657 Label materialize_true, materialize_false;
2658 Label* if_true = NULL;
2659 Label* if_false = NULL;
2660 Label* fall_through = NULL;
2661 context()->PrepareTest(&materialize_true, &materialize_false,
2662 &if_true, &if_false, &fall_through);
2663
2664 __ JumpIfSmi(x0, if_false);
2665 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
2666 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2667 Split(eq, if_true, if_false, fall_through);
2668
2669 context()->Plug(if_true, if_false);
2670}
2671
2672
2673void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2674 ZoneList<Expression*>* args = expr->arguments();
2675 DCHECK(args->length() == 1);
2676
2677 VisitForAccumulatorValue(args->at(0));
2678
2679 Label materialize_true, materialize_false;
2680 Label* if_true = NULL;
2681 Label* if_false = NULL;
2682 Label* fall_through = NULL;
2683 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2684 &if_false, &fall_through);
2685
2686 __ JumpIfSmi(x0, if_false);
2687 __ CompareObjectType(x0, x10, x11, JS_TYPED_ARRAY_TYPE);
2688 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2689 Split(eq, if_true, if_false, fall_through);
2690
2691 context()->Plug(if_true, if_false);
2692}
2693
2694
2695void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2696 ZoneList<Expression*>* args = expr->arguments();
2697 DCHECK(args->length() == 1);
2698
2699 VisitForAccumulatorValue(args->at(0));
2700
2701 Label materialize_true, materialize_false;
2702 Label* if_true = NULL;
2703 Label* if_false = NULL;
2704 Label* fall_through = NULL;
2705 context()->PrepareTest(&materialize_true, &materialize_false,
2706 &if_true, &if_false, &fall_through);
2707
2708 __ JumpIfSmi(x0, if_false);
2709 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
2710 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2711 Split(eq, if_true, if_false, fall_through);
2712
2713 context()->Plug(if_true, if_false);
2714}
2715
2716
2717void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2718 ZoneList<Expression*>* args = expr->arguments();
2719 DCHECK(args->length() == 1);
2720
2721 VisitForAccumulatorValue(args->at(0));
2722
2723 Label materialize_true, materialize_false;
2724 Label* if_true = NULL;
2725 Label* if_false = NULL;
2726 Label* fall_through = NULL;
2727 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2728 &if_false, &fall_through);
2729
2730 __ JumpIfSmi(x0, if_false);
2731 __ CompareObjectType(x0, x10, x11, JS_PROXY_TYPE);
2732 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2733 Split(eq, if_true, if_false, fall_through);
2734
2735 context()->Plug(if_true, if_false);
2736}
2737
2738
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002739void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2740 ASM_LOCATION("FullCodeGenerator::EmitClassOf");
2741 ZoneList<Expression*>* args = expr->arguments();
2742 DCHECK(args->length() == 1);
2743 Label done, null, function, non_function_constructor;
2744
2745 VisitForAccumulatorValue(args->at(0));
2746
2747 // If the object is not a JSReceiver, we return null.
2748 __ JumpIfSmi(x0, &null);
2749 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2750 __ CompareObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE);
2751 // x10: object's map.
2752 // x11: object's type.
2753 __ B(lt, &null);
2754
2755 // Return 'Function' for any object in the function instance type range.
Ben Murdochda12d292016-06-02 14:46:10 +01002756 __ Cmp(x11, FIRST_FUNCTION_TYPE);
2757 STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2758 __ B(hs, &function);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002759
2760 // Check if the constructor in the map is a JS function.
2761 Register instance_type = x14;
2762 __ GetMapConstructor(x12, x10, x13, instance_type);
2763 __ Cmp(instance_type, JS_FUNCTION_TYPE);
2764 __ B(ne, &non_function_constructor);
2765
2766 // x12 now contains the constructor function. Grab the
2767 // instance class name from there.
2768 __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
2769 __ Ldr(x0,
2770 FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
2771 __ B(&done);
2772
2773 // Functions have class 'Function'.
2774 __ Bind(&function);
2775 __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
2776 __ B(&done);
2777
2778 // Objects with a non-function constructor have class 'Object'.
2779 __ Bind(&non_function_constructor);
2780 __ LoadRoot(x0, Heap::kObject_stringRootIndex);
2781 __ B(&done);
2782
2783 // Non-JS objects have class null.
2784 __ Bind(&null);
2785 __ LoadRoot(x0, Heap::kNullValueRootIndex);
2786
2787 // All done.
2788 __ Bind(&done);
2789
2790 context()->Plug(x0);
2791}
2792
2793
2794void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
2795 ASM_LOCATION("FullCodeGenerator::EmitValueOf");
2796 ZoneList<Expression*>* args = expr->arguments();
2797 DCHECK(args->length() == 1);
2798 VisitForAccumulatorValue(args->at(0)); // Load the object.
2799
2800 Label done;
2801 // If the object is a smi return the object.
2802 __ JumpIfSmi(x0, &done);
2803 // If the object is not a value type, return the object.
2804 __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
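  // The object is a JSValue wrapper (e.g. one created by `new Number(5)`,
  // illustrative only); unwrap it by loading the boxed primitive from its
  // value field.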
2805 __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));
2806
2807 __ Bind(&done);
2808 context()->Plug(x0);
2809}
2810
2811
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002812void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
2813 ZoneList<Expression*>* args = expr->arguments();
2814 DCHECK_EQ(3, args->length());
2815
2816 Register string = x0;
2817 Register index = x1;
2818 Register value = x2;
2819 Register scratch = x10;
2820
2821 VisitForStackValue(args->at(0)); // index
2822 VisitForStackValue(args->at(1)); // value
2823 VisitForAccumulatorValue(args->at(2)); // string
Ben Murdoch097c5b22016-05-18 11:27:45 +01002824 PopOperands(value, index);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002825
2826 if (FLAG_debug_code) {
2827 __ AssertSmi(value, kNonSmiValue);
2828 __ AssertSmi(index, kNonSmiIndex);
2829 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
2830 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
2831 one_byte_seq_type);
2832 }
2833
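  // The characters of a sequential string start at kHeaderSize; subtracting
  // kHeapObjectTag compensates for the tag in the string pointer, so
  // scratch plus the untagged index addresses the character directly.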
2834 __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
2835 __ SmiUntag(value);
2836 __ SmiUntag(index);
2837 __ Strb(value, MemOperand(scratch, index));
2838 context()->Plug(string);
2839}
2840
2841
void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = x0;
  Register index = x1;
  Register value = x2;
  Register scratch = x10;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  PopOperands(value, index);

  if (FLAG_debug_code) {
    __ AssertSmi(value, kNonSmiValue);
    __ AssertSmi(index, kNonSmiIndex);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
                                 two_byte_seq_type);
  }

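  // Same scheme as the one-byte case above, except that each character is two
  // bytes wide, so the untagged index is scaled by two (LSL #1) in the store.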
  __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
  __ SmiUntag(value);
  __ SmiUntag(index);
  __ Strh(value, MemOperand(scratch, index, LSL, 1));
  context()->Plug(string);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  Register code = x0;
  Register result = x1;

  StringCharFromCodeGenerator generator(code, result);
  generator.GenerateFast(masm_);
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = x1;
  Register index = x0;
  Register result = x3;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ B(&done);

  __ Bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ B(&done);

  __ Bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = x1;
  Register index = x0;
  Register result = x0;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  x3,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ B(&done);

  __ Bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ B(&done);

  __ Bind(&need_conversion);
  // Move smi zero into the result register, which will trigger conversion.
  __ Mov(result, Smi::FromInt(0));
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitCall");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to x1.
  int const argc = args->length() - 2;
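  // The operand stack now holds the target, then the receiver, then the argc
  // arguments, so the target sits argc + 1 slots below the top of the stack.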
  __ Peek(x1, (argc + 1) * kXRegSize);
  // Call the target.
  __ Mov(x0, argc);
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, x0);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
  __ Tst(x10, String::kContainsCachedArrayIndexMask);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(x0);

  __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
  __ IndexFromHash(x10, x0);

  context()->Plug(x0);
}


void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(x0);
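  // Class inheritance links a derived constructor to its base constructor
  // through the constructor's own [[Prototype]], so the super constructor is
  // simply the prototype recorded in the function's map.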
  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ Ldr(x0, FieldMemOperand(x0, Map::kPrototypeOffset));
  context()->Plug(x0);
}

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ Mov(x10, debug_is_active);
  __ Ldrb(x0, MemOperand(x10));
  __ SmiTag(x0);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  Register result = x0;
  __ Allocate(JSIteratorResult::kSize, result, x10, x11, &runtime,
              NO_ALLOCATION_FLAGS);
  Register map_reg = x1;
  Register result_value = x2;
  Register boolean_done = x3;
  Register empty_fixed_array = x4;
  Register untagged_result = x5;
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, map_reg);
  __ Pop(boolean_done);
  __ Pop(result_value);
  __ LoadRoot(empty_fixed_array, Heap::kEmptyFixedArrayRootIndex);
  STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
                JSObject::kElementsOffset);
  STATIC_ASSERT(JSIteratorResult::kValueOffset + kPointerSize ==
                JSIteratorResult::kDoneOffset);
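  // The two STATIC_ASSERTs above guarantee that (properties, elements) and
  // (value, done) are adjacent pointer-sized fields, which is what allows
  // each pair to be initialized with a single Stp store-pair below.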
  __ ObjectUntag(untagged_result, result);
  __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
  __ Stp(empty_fixed_array, empty_fixed_array,
         MemOperand(untagged_result, JSObject::kPropertiesOffset));
  __ Stp(result_value, boolean_done,
         MemOperand(untagged_result, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ B(&done);

  __ Bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ Bind(&done);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadNativeContextSlot(expr->context_index(), x0);
  PushOperand(x0);

  // Push undefined as the receiver.
  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  PushOperand(x0);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ Peek(x1, (arg_count + 1) * kPointerSize);
  __ Mov(x0, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(x0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ LoadGlobalObject(x12);
          __ Mov(x11, Operand(var->name()));
          __ Push(x12, x11);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(x0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(x0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }
    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }
    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        // TODO(jbramley): This could be much more efficient using (for
        // example) the CSEL instruction.
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);

        __ Bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
        __ B(&done);

        __ Bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
        __ B(&done);

        __ Bind(&done);
        if (context()->IsStackValue()) {
          __ Push(result_register());
        }
      }
      break;
    }
    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ Mov(x3, x0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(x0);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      PushOperand(xzr);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ Peek(LoadDescriptor::ReceiverRegister(), 0);
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        const Register scratch = x10;
        __ Peek(scratch, kPointerSize);
        PushOperands(scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        PushOperand(result_register());
        const Register scratch1 = x10;
        const Register scratch2 = x11;
        __ Peek(scratch1, 2 * kPointerSize);
        __ Peek(scratch2, kPointerSize);
        PushOperands(scratch1, scratch2, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
        __ Peek(LoadDescriptor::NameRegister(), 0);
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(x0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property we
        // store the result under the receiver that is currently on top of the
        // stack.
        switch (assign_type) {
          case VARIABLE:
            __ Push(x0);
            break;
          case NAMED_PROPERTY:
            __ Poke(x0, kPointerSize);
            break;
          case NAMED_SUPER_PROPERTY:
            __ Poke(x0, kPointerSize * 2);
            break;
          case KEYED_PROPERTY:
            __ Poke(x0, kPointerSize * 2);
            break;
          case KEYED_SUPER_PROPERTY:
            __ Poke(x0, kPointerSize * 3);
            break;
        }
      }
    }

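    // Smis carry a zero tag bit, so a tagged smi can be incremented by adding
    // the tagged constant directly. Adds sets the condition flags; B(vc)
    // stays on the fast path only when the signed addition did not overflow.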
    __ Adds(x0, x0, Smi::FromInt(count_value));
    __ B(vc, &done);
    // Call stub. Undo operation first.
    __ Sub(x0, x0, Smi::FromInt(count_value));
    __ B(&stub_call);
    __ Bind(&slow);
  }

  // Convert old value into a number.
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(x0);
          break;
        case NAMED_PROPERTY:
          __ Poke(x0, kXRegSize);
          break;
        case NAMED_SUPER_PROPERTY:
          __ Poke(x0, 2 * kXRegSize);
          break;
        case KEYED_PROPERTY:
          __ Poke(x0, 2 * kXRegSize);
          break;
        case KEYED_SUPER_PROPERTY:
          __ Poke(x0, 3 * kXRegSize);
          break;
      }
    }
  }

  __ Bind(&stub_call);
  __ Mov(x1, x0);
  __ Mov(x0, Smi::FromInt(count_value));

  SetExpressionPosition(expr);

  {
    Assembler::BlockPoolsScope scope(masm_);
    Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
    CallIC(code, expr->CountBinOpFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ Bind(&done);

  // Store the value returned in x0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(x0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(x0);
      }
      break;
    case NAMED_PROPERTY: {
      __ Mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      PopOperand(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperand(StoreDescriptor::NameRegister());
      PopOperand(StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
  Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
    __ JumpIfSmi(x0, if_true);
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
    __ JumpIfSmi(x0, if_false);
    __ CompareObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE);
    Split(lt, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
    __ JumpIfSmi(x0, if_false);
    __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
    __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
    __ CompareRoot(x0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    ASM_LOCATION(
        "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
    __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_false);
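    // null is excluded explicitly because the null oddball's map is also
    // flagged undetectable (so that null == undefined comparisons work), yet
    // typeof null must be "object", not "undefined".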
    __ JumpIfSmi(x0, if_false);
    // Check for undetectable objects => true.
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
                    fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
    __ JumpIfSmi(x0, if_false);
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ And(x1, x1, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    __ CompareAndSplit(x1, Operand(1 << Map::kIsCallable), eq, if_true,
                       if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
    __ JumpIfSmi(x0, if_false);
    __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ JumpIfObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE, if_false, lt);
    // Check for callable or undetectable objects => false.
    __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
    __ TestAndSplit(x10, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable),
                    if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof " \
                 #type "_string");                              \
    __ JumpIfSmi(x0, if_true);                                  \
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));    \
    __ CompareRoot(x0, Heap::k##Type##MapRootIndex);            \
    Split(eq, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  // clang-format on
  } else {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
    if (if_false != fall_through) __ B(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // Try to generate an optimized comparison with a literal value.
  // TODO(jbramley): This only checks common values like NaN or undefined.
  // Should it also handle ARM64 immediate operands?
  if (TryLiteralCompare(expr)) {
    return;
  }

  // Assign labels according to context()->PrepareTest.
  Label materialize_true;
  Label materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(x0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(x1);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(x0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cond = CompareIC::ComputeCondition(op);

      // Pop the stack value.
      PopOperand(x1);

      JumpPatchSite patch_site(masm_);
      if (ShouldInlineSmiCase(op)) {
        Label slow_case;
        patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
        __ Cmp(x1, x0);
        Split(cond, if_true, if_false, NULL);
        __ Bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ CompareRoot(x0, nil_value);
    Split(eq, if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(x0, if_false);
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
                    fall_through);
  }

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  // TODO(jbramley): Tidy this up once the merge is done, using named registers
  // and suchlike. The implementation changes a little by bleeding_edge so I
  // don't want to spend too much time on it now.

  Label suspend, continuation, post_runtime, resume, exception;

  __ B(&suspend);
  // TODO(jbramley): This label is bound here because the following code
  // looks at its pos(). Is it possible to do something more efficient here,
  // perhaps using Adr?
  __ Bind(&continuation);
  // When we arrive here, x0 holds the generator object.
  __ RecordGeneratorContinuation();
  __ Ldr(x1, FieldMemOperand(x0, JSGeneratorObject::kResumeModeOffset));
  __ Ldr(x0, FieldMemOperand(x0, JSGeneratorObject::kInputOffset));
  STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
  STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
  __ Cmp(x1, Operand(Smi::FromInt(JSGeneratorObject::kReturn)));
  __ B(lt, &resume);
  __ Push(result_register());
  __ B(gt, &exception);
  EmitCreateIteratorResult(true);
  EmitUnwindAndReturn();

  __ Bind(&exception);
  __ CallRuntime(Runtime::kThrow);

  __ Bind(&suspend);
  OperandStackDepthIncrement(1);  // Not popped on this path.
  VisitForAccumulatorValue(expr->generator_object());
  DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
  __ Mov(x1, Smi::FromInt(continuation.pos()));
  __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
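  // The position of the continuation label is recorded as a smi in the
  // generator object, so that a later resume can jump straight back to
  // &continuation with the generator object in x0.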
  __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
  __ Mov(x1, cp);
  __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
                      kLRHasBeenSaved, kDontSaveFPRegs);
  __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
  __ Cmp(__ StackPointer(), x1);
  __ B(eq, &post_runtime);
  __ Push(x0);  // generator object
  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
  RestoreContext();
  __ Bind(&post_runtime);
  PopOperand(result_register());
  EmitReturnSequence();

  __ Bind(&resume);
  context()->Plug(result_register());
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
  OperandStackDepthIncrement(2);
  __ Push(reg1, reg2);
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
                                     Register reg3) {
  OperandStackDepthIncrement(3);
  __ Push(reg1, reg2, reg3);
}

void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
  OperandStackDepthDecrement(2);
  __ Pop(reg1, reg2);
}

void FullCodeGenerator::EmitOperandStackDepthCheck() {
  if (FLAG_debug_code) {
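    // The tracked operand stack depth must match the actual frame layout:
    // fp - jssp should equal the fixed frame size plus one pointer per
    // operand currently on the stack.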
    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
                        operand_stack_depth_ * kPointerSize;
    __ Sub(x0, fp, jssp);
    __ Cmp(x0, Operand(expected_diff));
    __ Assert(eq, kUnexpectedStackDepth);
  }
}

void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

  // Allocate and populate an object with this form: { value: VAL, done: DONE }

  Register result = x0;
  __ Allocate(JSIteratorResult::kSize, result, x10, x11, &allocate,
              NO_ALLOCATION_FLAGS);
  __ B(&done_allocate);

  __ Bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ Bind(&done_allocate);
  Register map_reg = x1;
  Register result_value = x2;
  Register boolean_done = x3;
  Register empty_fixed_array = x4;
  Register untagged_result = x5;
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, map_reg);
  PopOperand(result_value);
  __ LoadRoot(boolean_done,
              done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  __ LoadRoot(empty_fixed_array, Heap::kEmptyFixedArrayRootIndex);
  STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
                JSObject::kElementsOffset);
  STATIC_ASSERT(JSIteratorResult::kValueOffset + kPointerSize ==
                JSIteratorResult::kDoneOffset);
  __ ObjectUntag(untagged_result, result);
  __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
  __ Stp(empty_fixed_array, empty_fixed_array,
         MemOperand(untagged_result, JSObject::kPropertiesOffset));
  __ Stp(result_value, boolean_done,
         MemOperand(untagged_result, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
}


// TODO(all): I don't like this method.
// It seems to me that in too many places x0 is used in place of this.
// Also, this function is not suitable for all places where x0 should be
// abstracted (e.g. when used as an argument). But some places assume that the
// first argument register is x0, and use this function instead.
// Considering that most of the register allocation is hard-coded in the
// FullCodeGen, that it is unlikely we will need to change it extensively, and
// that abstracting the allocation through functions would not yield any
// performance benefit, I think the existence of this function is debatable.
Register FullCodeGenerator::result_register() {
  return x0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
  __ Ldr(value, MemOperand(fp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
  __ Str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ Ldr(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    DCHECK(kSmiTag == 0);
    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, x10);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  PushOperand(x10);
}


void FullCodeGenerator::EnterFinallyBlock() {
  ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
  DCHECK(!result_register().is(x10));
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x10, pending_message_obj);
  __ Ldr(x10, MemOperand(x10));
  PushOperand(x10);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
  DCHECK(!result_register().is(x10));

  // Restore pending message from stack.
  PopOperand(x10);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x13, pending_message_obj);
  __ Str(x10, MemOperand(x13));
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(x10));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
  __ Mov(x13, pending_message_obj);
  __ Str(x10, MemOperand(x13));
}


void FullCodeGenerator::DeferredCommands::EmitCommands() {
  __ Pop(result_register(), x1);  // Restore the accumulator and get the token.
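  // Each deferred command (break, continue, return, throw) recorded while
  // unwinding try-finally blocks carries a unique smi token; compare the
  // token popped above against each recorded command and dispatch to the
  // matching handler.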
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ Cmp(x1, Operand(Smi::FromInt(cmd.token)));
    __ B(ne, &skip);
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}

#undef __


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  // Turn the jump into a nop.
  Address branch_address = pc - 3 * kInstructionSize;
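  // A back edge site ends with <b.pl / nop> <ldr x16, <addr>> <blr x16>, and
  // pc points just past the blr, so the patchable branch or nop sits three
  // instructions back and the literal load two instructions back.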
  Isolate* isolate = unoptimized_code->GetIsolate();
  PatchingAssembler patcher(isolate, branch_address, 1);

  DCHECK(Instruction::Cast(branch_address)
             ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
         (Instruction::Cast(branch_address)->IsCondBranchImm() &&
          Instruction::Cast(branch_address)->ImmPCOffset() ==
              6 * kInstructionSize));

  switch (target_state) {
    case INTERRUPT:
      //  <decrement profiling counter>
      //  .. .. .. ..  b.pl ok
      //  .. .. .. ..  ldr x16, pc+<interrupt stub address>
      //  .. .. .. ..  blr x16
      //  ... more instructions.
      //  ok-label
      // Jump offset is 6 instructions.
      patcher.b(6, pl);
      break;
    case ON_STACK_REPLACEMENT:
      //  <decrement profiling counter>
      //  .. .. .. ..  mov x0, x0 (NOP)
      //  .. .. .. ..  ldr x16, pc+<on-stack replacement address>
      //  .. .. .. ..  blr x16
      patcher.nop(Assembler::INTERRUPT_CODE_NOP);
      break;
  }

  // Replace the call address.
  Instruction* load = Instruction::Cast(pc)->preceding(2);
  Address interrupt_address_pointer =
      reinterpret_cast<Address>(load) + load->ImmPCOffset();
  DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(
              isolate->builtins()->OnStackReplacement()->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(
              isolate->builtins()->InterruptCheck()->entry())));
  Memory::uint64_at(interrupt_address_pointer) =
      reinterpret_cast<uint64_t>(replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  // TODO(jbramley): There should be some extra assertions here (as in the ARM
  // back-end), but this function is gone in bleeding_edge so it might not
  // matter anyway.
  Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);

  if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
    Instruction* load = Instruction::Cast(pc)->preceding(2);
    uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
                                       load->ImmPCOffset());
    if (entry == reinterpret_cast<uint64_t>(
                     isolate->builtins()->OnStackReplacement()->entry())) {
      return ON_STACK_REPLACEMENT;
    } else {
      UNREACHABLE();
    }
  }

  return INTERRUPT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM64