// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/arm64/code-stubs-arm64.h"
#include "src/arm64/frames-arm64.h"
#include "src/arm64/macro-assembler-arm64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    if (patch_site_.is_bound()) {
      DCHECK(info_emitted_);
    } else {
      DCHECK(reg_.IsNone());
    }
  }

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    DCHECK(!reg.Is(csp));
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbz(xzr, 0, target);   // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    DCHECK(!reg.Is(csp));
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbnz(xzr, 0, target);  // Never taken before patched.
  }

  void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
    UseScratchRegisterScope temps(masm_);
    Register temp = temps.AcquireX();
    __ Orr(temp, reg1, reg2);
    EmitJumpIfNotSmi(temp, target);
  }

  void EmitPatchInfo() {
    Assembler::BlockPoolsScope scope(masm_);
    InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
#ifdef DEBUG
    info_emitted_ = true;
#endif
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
  Register reg_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   - x1: the JS function object being called (i.e. ourselves).
//   - x3: the new target value
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds a JS frame. See JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ Function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kXRegSize;
    __ Peek(x10, receiver_offset);
    __ AssertNotSmi(x10);
    __ CompareObjectType(x10, x10, x11, FIRST_JS_RECEIVER_TYPE);
    __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.
  // The MANUAL indicates that the scope shouldn't actually generate code
  // to set up the frame because we do it manually below.
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  // This call emits the following sequence in a way that can be patched for
  // code ageing support:
  //   Push(lr, fp, cp, x1);
  //   Add(fp, jssp, 2 * kPointerSize);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  // Reserve space on the stack for locals.
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        DCHECK(jssp.Is(__ StackPointer()));
        __ Sub(x10, jssp, locals_count * kPointerSize);
        __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
        __ B(hs, &ok);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ Bind(&ok);
      }
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      if (FLAG_optimize_for_size) {
        __ PushMultipleTimes(x10, locals_count);
      } else {
        const int kMaxPushes = 32;
        if (locals_count >= kMaxPushes) {
          int loop_iterations = locals_count / kMaxPushes;
          __ Mov(x2, loop_iterations);
          Label loop_header;
          __ Bind(&loop_header);
          // Do pushes.
          __ PushMultipleTimes(x10, kMaxPushes);
          __ Subs(x2, x2, 1);
          __ B(ne, &loop_header);
        }
        int remaining = locals_count % kMaxPushes;
        // Emit the remaining pushes.
        __ PushMultipleTimes(x10, remaining);
      }
    }
  }

  bool function_in_register_x1 = true;

  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in x1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ Mov(x10, Operand(info->scope()->GetScopeInfo(info->isolate())));
      __ Push(x1, x10);
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ Push(x3);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ Push(x1);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ Pop(x3);  // Restore new target.
      }
    }
    function_in_register_x1 = false;
    // Context is returned in x0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ Mov(cp, x0);
    __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ Ldr(x10, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ Str(x10, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, static_cast<int>(target.offset()), x10,
                                    x11, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Register holding this function and new target are both trashed in case we
  // bailout here. But since that can happen only when new target is not used
  // and we allocate a context, the value of |function_in_register| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_x1) {
      __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers register again, keep it marked as such.
    }
    SetVar(this_function_var, x1, x0, x2);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, x3, x0, x2);
  }

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_x1) {
      __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_x1 = false;
    SetVar(rest_param, x0, x1, x2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_x1) {
      // Load this again, if it's used by the local context below.
      __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(x1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, x0, x1, x2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    VisitForEffect(scope()->GetIllegalRedeclaration());

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    // Assert that the declarations do not use ICs. Otherwise the debugger
    // won't be able to redirect a PC at an IC to the correct IC in newly
    // recompiled code.
    DCHECK_EQ(0, ic_total_count_);

    {
      Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      DCHECK(jssp.Is(__ StackPointer()));
      __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
      __ B(hs, &ok);
      PredictableCodeSizeScope predictable(masm_,
                                           Assembler::kCallSizeWithRelocation);
      __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ Bind(&ok);
    }

    {
      Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(literal()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emission of the pools, so they don't get emitted in the middle
  // of the back edge table.
  masm()->CheckVeneerPool(true, false);
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ Mov(x0, Smi::FromInt(0));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
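  // Subtract |delta| (as a Smi) from the profiling counter cell. Subs leaves
  // the condition flags set; callers branch on 'pl' to decide whether the
  // interrupt budget has been exhausted.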
  __ Mov(x2, Operand(profiling_counter_));
  __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
  __ Subs(x3, x3, Smi::FromInt(delta));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ Mov(x2, Operand(profiling_counter_));
  __ Mov(x3, Smi::FromInt(reset_value));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  DCHECK(jssp.Is(__ StackPointer()));
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockPoolsScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
  // to reduce the absolute error due to the integer division. To do that,
  // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
  // the result).
  int distance =
      static_cast<int>(masm_->SizeOfCodeGeneratedSince(back_edge_target) +
                       kCodeSizeMultiplier / 2);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ B(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ Bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ B(pl, &ok);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ Push(x0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ Pop(x0);
  }
  EmitProfilingCounterReset();
  __ Bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");

  if (return_label_.is_bound()) {
    __ B(&return_label_);

  } else {
    __ Bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in x0.
      __ Push(result_register());
      __ CallRuntime(Runtime::kTraceExit);
      DCHECK(x0.Is(result_register()));
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    const Register& current_sp = __ StackPointer();
    // Nothing ensures 16 bytes alignment here.
    DCHECK(!current_sp.Is(csp));
    __ Mov(current_sp, fp);
    __ Ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
    // Drop the arguments and receiver and return.
    // TODO(all): This implementation is overkill as it supports 2**31+1
    // arguments, consider how to improve it without creating a security
    // hole.
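    // The number of bytes to drop is emitted as a 64-bit literal directly
    // after the Ret below (three instructions ahead of the ldr) and is loaded
    // here PC-relative into ip0.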
    __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
    __ Add(current_sp, current_sp, ip0);
    __ Ret();
    int32_t arg_count = info_->scope()->num_parameters() + 1;
    __ dc64(kXRegSize * arg_count);
  }
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  // Root values have no side effects.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ Mov(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectableObject());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ Poke(reg, 0);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ Bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ Bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(x10, Heap::kFalseValueRootIndex);
  __ Bind(&done);
  codegen()->PushOperand(x10);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(x10, value_root_index);
  codegen()->PushOperand(x10);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) {
      __ B(true_label_);
    }
  } else {
    if (false_label_ != fall_through_) {
      __ B(false_label_);
    }
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(eq, if_true, if_false, fall_through);
}


// If (cond), branch to if_true.
// If (!cond), branch to if_false.
// fall_through is used as an optimization in cases where only one branch
// instruction is necessary.
void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ B(cond, if_true);
  } else if (if_true == fall_through) {
    DCHECK(if_false != fall_through);
    __ B(NegateCondition(cond), if_false);
  } else {
    __ B(cond, if_true);
    __ B(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kXRegSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ Ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!AreAliased(src, scratch0, scratch1));
  MemOperand location = VarOperand(var, scratch0);
  __ Str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    // scratch0 contains the correct context.
    __ RecordWriteContextSlot(scratch0, static_cast<int>(location.offset()),
                              src, scratch1, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  // TODO(all): Investigate to see if there is something to work on here.
  Label skip;
  if (should_normalize) {
    __ B(&skip);
  }
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(x0, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, NULL);
    __ Bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;

  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Mov(x2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
        __ Push(x2, x0);
      } else {
        // Pushing 0 (xzr) indicates no initial value.
        __ Push(x2, xzr);
      }
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ Function Declaration");
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ Function Declaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                x2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Function Declaration");
      __ Mov(x2, Operand(variable->name()));
      PushOperand(x2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Mov(x11, Operand(pairs));
  Register flags = xzr;
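  // When there are no flags, xzr (which reads as Smi zero) is pushed directly,
  // avoiding the extra Mov.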
  if (Smi::FromInt(DeclareGlobalsFlags())) {
    flags = x10;
    __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
  }
  __ Push(x11, flags);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ Bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ Peek(x1, 0);  // Switch value.

    JumpPatchSite patch_site(masm_);
    if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
      Label slow_case;
      patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
      __ Cmp(x1, x0);
      __ B(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ B(clause->body_target());
      __ Bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ B(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
    __ Drop(1);
    __ B(clause->body_target());
    __ Bind(&skip);

    __ Cbnz(x0, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ B(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ Bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ B(nested_statement.break_label());
  } else {
    __ B(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ Bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ Bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // TODO(all): This visitor probably needs better comments and a revisit.

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(ForIn::kElementCount);

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(x0, &convert);
  __ JumpIfObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE, &done_convert, ge);
  __ JumpIfRoot(x0, Heap::kNullValueRootIndex, &exit);
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
  __ Bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ Bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ Push(x0);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  // Note: Proxies never have an enum cache, so will always take the
  // slow path.
  Label call_runtime;
  __ CheckEnumCache(x0, x15, x10, x11, x12, x13, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ B(&use_cache);

  // Get the set of properties to enumerate.
  __ Bind(&call_runtime);
  __ Push(x0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array, no_descriptors;
  __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);

  // We got a map in register x0. Get the enumeration cache from it.
  __ Bind(&use_cache);

  __ EnumLengthUntagged(x1, x0);
  __ Cbz(x1, &no_descriptors);

  __ LoadInstanceDescriptors(x0, x2);
  __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
  __ Ldr(x2,
         FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ SmiTag(x1);
  // Map, enumeration cache, enum cache length, zero (both last as smis).
  __ Push(x0, x2, x1, xzr);
  __ B(&loop);

  __ Bind(&no_descriptors);
  __ Drop(1);
  __ B(&exit);

  // We got a fixed array in register x0. Iterate through that.
  __ Bind(&fixed_array);

  int const vector_index = SmiFromSlot(slot)->value();
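  // Record in the feedback vector that this for-in went megamorphic: the slow
  // (fixed-array) path provides no usable map information for this slot.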
  __ EmitLoadTypeFeedbackVector(x1);
  __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(vector_index)));
  __ Mov(x1, Smi::FromInt(1));  // Smi(1) indicates slow check.
  __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
  __ Push(x1, x0, x2);  // Smi and array, fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS);
  __ Push(xzr);  // Initial index.

  // Generate code for doing the condition check.
  __ Bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to x0, load the length to x1.
  __ PeekPair(x0, x1, 0);
  __ Cmp(x0, x1);  // Compare to the array length.
  __ B(hs, loop_statement.break_label());

  // Get the current entry of the array into register x3.
  __ Peek(x10, 2 * kXRegSize);
  __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
  __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register x10.
  __ Peek(x2, 3 * kXRegSize);

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ Peek(x1, 4 * kXRegSize);
  __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
  __ Cmp(x11, x2);
  __ B(eq, &update_each);

  // We might get here from TurboFan or Crankshaft when something in the
  // for-in loop body deopts and only now notice in fullcodegen that we
  // can no longer use the enum cache, i.e. we left fast mode. So better record
  // this information here, in case we later OSR back into this loop or
  // reoptimize the whole function w/o rerunning the loop with the slow
  // mode object in fullcodegen (which would result in a deopt loop).
  __ EmitLoadTypeFeedbackVector(x0);
  __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ Str(x10, FieldMemOperand(x0, FixedArray::OffsetOfElementAt(vector_index)));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(x1, x3);
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ Mov(x3, x0);
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex,
                loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register x3.
  __ Bind(&update_each);
  __ Mov(result_register(), x3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ Bind(loop_statement.continue_label());
  // TODO(all): We could use a callee saved register to avoid popping.
  __ Pop(x0);
  __ Add(x0, x0, Smi::FromInt(1));
  __ Push(x0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ B(&loop);

  // Remove the pointers stored on the stack.
  __ Bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ Bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new space for
  // nested functions that don't need literals cloning. If we're running with
  // the --always-opt or the --prepare-always-opt flag, we need to use the
  // runtime function so that the new function we are creating here gets a
  // chance to have its code optimized and doesn't just get a copy of the
  // existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ Mov(x2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ Push(info);
    __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
                             : Runtime::kNewClosure);
  }
  context()->Plug(x0);
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Peek(StoreDescriptor::ReceiverRegister(), 0);
  __ Mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), x0);
  __ Mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = x10;
  Register temp = x11;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    __ Mov(next, current);

    __ Bind(&loop);
    // Terminate at native context.
    __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
    // Check that extension is "the hole".
    __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ B(&loop);
    __ Bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = x10;
  Register temp = x11;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ B(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ Mov(x0, Operand(var->name()));
        __ Push(x0);
        __ CallRuntime(Runtime::kThrowReferenceError);
      }
    }
    __ B(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
  __ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
  __ Mov(LoadDescriptor::SlotRegister(),
         SmiFromSlot(proxy->VariableFeedbackSlot()));
  CallLoadIC(typeof_mode);
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(x0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        GetVar(x0, var);
        Label done;
        __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ Mov(x0, Operand(var->name()));
          __ Push(x0);
          __ CallRuntime(Runtime::kThrowReferenceError);
          __ Bind(&done);
        } else {
          // Uninitialized legacy const bindings are unholed.
          DCHECK(var->mode() == CONST_LEGACY);
          __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
          __ Bind(&done);
        }
        context()->Plug(x0);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ Bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ Push(var->name());
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ Bind(&done);
      context()->Plug(x0);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(expr->pattern()));
  __ Mov(x0, Smi::FromInt(expr->flags()));
  FastCloneRegExpStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    __ LoadRoot(x10, Heap::kNullValueRootIndex);
    PushOperand(x10);
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}


1441void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1442 Comment cmnt(masm_, "[ ObjectLiteral");
1443
1444 Handle<FixedArray> constant_properties = expr->constant_properties();
1445 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1446 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1447 __ Mov(x1, Operand(constant_properties));
1448 int flags = expr->ComputeFlags();
1449 __ Mov(x0, Smi::FromInt(flags));
1450 if (MustCreateObjectLiteralWithRuntime(expr)) {
1451 __ Push(x3, x2, x1, x0);
1452 __ CallRuntime(Runtime::kCreateObjectLiteral);
1453 } else {
1454 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1455 __ CallStub(&stub);
1456 }
1457 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1458
1459 // If result_saved is true the result is on top of the stack. If
1460 // result_saved is false the result is in x0.
1461 bool result_saved = false;
1462
1463 AccessorTable accessor_table(zone());
1464 int property_index = 0;
1465 for (; property_index < expr->properties()->length(); property_index++) {
1466 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1467 if (property->is_computed_name()) break;
1468 if (property->IsCompileTimeValue()) continue;
1469
1470 Literal* key = property->key()->AsLiteral();
1471 Expression* value = property->value();
1472 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001473 PushOperand(x0); // Save result on stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001474 result_saved = true;
1475 }
1476 switch (property->kind()) {
1477 case ObjectLiteral::Property::CONSTANT:
1478 UNREACHABLE();
1479 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1480 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1481 // Fall through.
1482 case ObjectLiteral::Property::COMPUTED:
1483 // It is safe to use [[Put]] here because the boilerplate already
1484 // contains computed properties with an uninitialized value.
1485 if (key->value()->IsInternalizedString()) {
1486 if (property->emit_store()) {
1487 VisitForAccumulatorValue(value);
1488 DCHECK(StoreDescriptor::ValueRegister().is(x0));
1489 __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1490 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1491 EmitLoadStoreICSlot(property->GetSlot(0));
1492 CallStoreIC();
1493 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1494
1495 if (NeedsHomeObject(value)) {
1496 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1497 }
1498 } else {
1499 VisitForEffect(value);
1500 }
1501 break;
1502 }
1503 __ Peek(x0, 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001504 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001505 VisitForStackValue(key);
1506 VisitForStackValue(value);
1507 if (property->emit_store()) {
1508 if (NeedsHomeObject(value)) {
1509 EmitSetHomeObject(value, 2, property->GetSlot());
1510 }
1511 __ Mov(x0, Smi::FromInt(SLOPPY)); // Language mode
Ben Murdoch097c5b22016-05-18 11:27:45 +01001512 PushOperand(x0);
1513 CallRuntimeWithOperands(Runtime::kSetProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001514 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001515 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001516 }
1517 break;
1518 case ObjectLiteral::Property::PROTOTYPE:
1519 DCHECK(property->emit_store());
1520 // Duplicate receiver on stack.
1521 __ Peek(x0, 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001522 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001523 VisitForStackValue(value);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001524 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001525 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1526 NO_REGISTERS);
1527 break;
1528 case ObjectLiteral::Property::GETTER:
1529 if (property->emit_store()) {
1530 accessor_table.lookup(key)->second->getter = property;
1531 }
1532 break;
1533 case ObjectLiteral::Property::SETTER:
1534 if (property->emit_store()) {
1535 accessor_table.lookup(key)->second->setter = property;
1536 }
1537 break;
1538 }
1539 }
1540
1541 // Emit code to define accessors, using only a single call to the runtime for
1542 // each pair of corresponding getters and setters.
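// For example, { get x() { return 1; }, set x(v) {} } produces one entry in
// accessor_table and therefore a single kDefineAccessorPropertyUnchecked
// call covering both the getter and the setter closures.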
1543 for (AccessorTable::Iterator it = accessor_table.begin();
1544 it != accessor_table.end();
1545 ++it) {
1546 __ Peek(x10, 0); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001547 PushOperand(x10);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001548 VisitForStackValue(it->first);
1549 EmitAccessor(it->second->getter);
1550 EmitAccessor(it->second->setter);
1551 __ Mov(x10, Smi::FromInt(NONE));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001552 PushOperand(x10);
1553 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001554 }
1555
1556 // Object literals have two parts. The "static" part on the left contains no
1557 // computed property names, and so we can compute its map ahead of time; see
1558 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1559 // starts with the first computed property name, and continues with all
1560 // properties to its right. All the code from above initializes the static
1561 // component of the object literal, and arranges for the map of the result to
1562 // reflect the static order in which the keys appear. For the dynamic
1563 // properties, we compile them into a series of "SetOwnProperty" runtime
1564 // calls. This will preserve insertion order.
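// Illustrative example: in { a: 1, [k]: 2, b: 3 } the property 'a' belongs to
// the static part and is baked into the boilerplate map, while '[k]' and
// every property after it ('b' included) are defined by the runtime calls
// emitted in the loop below, preserving insertion order.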
1565 for (; property_index < expr->properties()->length(); property_index++) {
1566 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1567
1568 Expression* value = property->value();
1569 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001570 PushOperand(x0); // Save result on stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001571 result_saved = true;
1572 }
1573
1574 __ Peek(x10, 0); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001575 PushOperand(x10);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001576
1577 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1578 DCHECK(!property->is_computed_name());
1579 VisitForStackValue(value);
1580 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001581 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001582 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1583 NO_REGISTERS);
1584 } else {
1585 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1586 VisitForStackValue(value);
1587 if (NeedsHomeObject(value)) {
1588 EmitSetHomeObject(value, 2, property->GetSlot());
1589 }
1590
1591 switch (property->kind()) {
1592 case ObjectLiteral::Property::CONSTANT:
1593 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1594 case ObjectLiteral::Property::COMPUTED:
1595 if (property->emit_store()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001596 PushOperand(Smi::FromInt(NONE));
1597 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1598 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001599 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001600 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001601 }
1602 break;
1603
1604 case ObjectLiteral::Property::PROTOTYPE:
1605 UNREACHABLE();
1606 break;
1607
1608 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001609 PushOperand(Smi::FromInt(NONE));
1610 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001611 break;
1612
1613 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001614 PushOperand(Smi::FromInt(NONE));
1615 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001616 break;
1617 }
1618 }
1619 }
1620
1621 if (expr->has_function()) {
1622 DCHECK(result_saved);
1623 __ Peek(x0, 0);
1624 __ Push(x0);
1625 __ CallRuntime(Runtime::kToFastProperties);
1626 }
1627
1628 if (result_saved) {
1629 context()->PlugTOS();
1630 } else {
1631 context()->Plug(x0);
1632 }
1633}
1634
1635
1636void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1637 Comment cmnt(masm_, "[ ArrayLiteral");
1638
1639 Handle<FixedArray> constant_elements = expr->constant_elements();
1640 bool has_fast_elements =
1641 IsFastObjectElementsKind(expr->constant_elements_kind());
1642
1643 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1644 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1645 // If the only customer of allocation sites is transitioning, then
1646 // we can turn it off if we don't have anywhere else to transition to.
1647 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1648 }
1649
1650 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1651 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1652 __ Mov(x1, Operand(constant_elements));
1653 if (MustCreateArrayLiteralWithRuntime(expr)) {
1654 __ Mov(x0, Smi::FromInt(expr->ComputeFlags()));
1655 __ Push(x3, x2, x1, x0);
1656 __ CallRuntime(Runtime::kCreateArrayLiteral);
1657 } else {
1658 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1659 __ CallStub(&stub);
1660 }
1661 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1662
1663 bool result_saved = false; // Is the result saved to the stack?
1664 ZoneList<Expression*>* subexprs = expr->values();
1665 int length = subexprs->length();
1666
1667 // Emit code to evaluate all the non-constant subexpressions and to store
1668 // them into the newly cloned array.
1669 int array_index = 0;
1670 for (; array_index < length; array_index++) {
1671 Expression* subexpr = subexprs->at(array_index);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001672 DCHECK(!subexpr->IsSpread());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001673
1674 // If the subexpression is a literal or a simple materialized literal it
1675 // is already set in the cloned array.
1676 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1677
1678 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001679 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001680 result_saved = true;
1681 }
1682 VisitForAccumulatorValue(subexpr);
1683
1684 __ Mov(StoreDescriptor::NameRegister(), Smi::FromInt(array_index));
1685 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1686 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1687 Handle<Code> ic =
1688 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1689 CallIC(ic);
1690
1691 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1692 }
1693
1694 // In case the array literal contains spread expressions it has two parts. The
1695 // first part is the "static" array, which has a literal index and is handled
1696 // above. The second part starts at the first spread expression (inclusive);
1697 // these elements get appended to the array. Note that the number of elements
1698 // an iterable produces is not known ahead of time.
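// Illustrative example: in [1, 2, ...it, 3] the elements before the first
// spread (1 and 2) are stored via the keyed store IC above, while everything
// from the spread onwards is appended element by element below, since the
// number of values 'it' yields is only known at runtime.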
1699 if (array_index < length && result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001700 PopOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001701 result_saved = false;
1702 }
1703 for (; array_index < length; array_index++) {
1704 Expression* subexpr = subexprs->at(array_index);
1705
Ben Murdoch097c5b22016-05-18 11:27:45 +01001706 PushOperand(x0);
1707 DCHECK(!subexpr->IsSpread());
1708 VisitForStackValue(subexpr);
1709 CallRuntimeWithOperands(Runtime::kAppendElement);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001710
1711 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1712 }
1713
1714 if (result_saved) {
1715 context()->PlugTOS();
1716 } else {
1717 context()->Plug(x0);
1718 }
1719}
1720
1721
1722void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1723 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1724
1725 Comment cmnt(masm_, "[ Assignment");
1726 SetExpressionPosition(expr, INSERT_BREAK);
1727
1728 Property* property = expr->target()->AsProperty();
1729 LhsKind assign_type = Property::GetAssignType(property);
1730
1731 // Evaluate LHS expression.
1732 switch (assign_type) {
1733 case VARIABLE:
1734 // Nothing to do here.
1735 break;
1736 case NAMED_PROPERTY:
1737 if (expr->is_compound()) {
1738 // We need the receiver both on the stack and in the register.
1739 VisitForStackValue(property->obj());
1740 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
1741 } else {
1742 VisitForStackValue(property->obj());
1743 }
1744 break;
1745 case NAMED_SUPER_PROPERTY:
1746 VisitForStackValue(
1747 property->obj()->AsSuperPropertyReference()->this_var());
1748 VisitForAccumulatorValue(
1749 property->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001750 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001751 if (expr->is_compound()) {
1752 const Register scratch = x10;
1753 __ Peek(scratch, kPointerSize);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001754 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001755 }
1756 break;
1757 case KEYED_SUPER_PROPERTY:
1758 VisitForStackValue(
1759 property->obj()->AsSuperPropertyReference()->this_var());
1760 VisitForStackValue(
1761 property->obj()->AsSuperPropertyReference()->home_object());
1762 VisitForAccumulatorValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001763 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001764 if (expr->is_compound()) {
1765 const Register scratch1 = x10;
1766 const Register scratch2 = x11;
1767 __ Peek(scratch1, 2 * kPointerSize);
1768 __ Peek(scratch2, kPointerSize);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001769 PushOperands(scratch1, scratch2, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001770 }
1771 break;
1772 case KEYED_PROPERTY:
1773 if (expr->is_compound()) {
1774 VisitForStackValue(property->obj());
1775 VisitForStackValue(property->key());
1776 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
1777 __ Peek(LoadDescriptor::NameRegister(), 0);
1778 } else {
1779 VisitForStackValue(property->obj());
1780 VisitForStackValue(property->key());
1781 }
1782 break;
1783 }
1784
1785 // For compound assignments we need another deoptimization point after the
1786 // variable/property load.
1787 if (expr->is_compound()) {
1788 { AccumulatorValueContext context(this);
1789 switch (assign_type) {
1790 case VARIABLE:
1791 EmitVariableLoad(expr->target()->AsVariableProxy());
1792 PrepareForBailout(expr->target(), TOS_REG);
1793 break;
1794 case NAMED_PROPERTY:
1795 EmitNamedPropertyLoad(property);
1796 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1797 break;
1798 case NAMED_SUPER_PROPERTY:
1799 EmitNamedSuperPropertyLoad(property);
1800 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1801 break;
1802 case KEYED_SUPER_PROPERTY:
1803 EmitKeyedSuperPropertyLoad(property);
1804 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1805 break;
1806 case KEYED_PROPERTY:
1807 EmitKeyedPropertyLoad(property);
1808 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1809 break;
1810 }
1811 }
1812
1813 Token::Value op = expr->binary_op();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001814 PushOperand(x0); // Left operand goes on the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001815 VisitForAccumulatorValue(expr->value());
1816
1817 AccumulatorValueContext context(this);
1818 if (ShouldInlineSmiCase(op)) {
1819 EmitInlineSmiBinaryOp(expr->binary_operation(),
1820 op,
1821 expr->target(),
1822 expr->value());
1823 } else {
1824 EmitBinaryOp(expr->binary_operation(), op);
1825 }
1826
1827 // Deoptimization point in case the binary operation may have side effects.
1828 PrepareForBailout(expr->binary_operation(), TOS_REG);
1829 } else {
1830 VisitForAccumulatorValue(expr->value());
1831 }
1832
1833 SetExpressionPosition(expr);
1834
1835 // Store the value.
1836 switch (assign_type) {
1837 case VARIABLE:
1838 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1839 expr->op(), expr->AssignmentSlot());
1840 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1841 context()->Plug(x0);
1842 break;
1843 case NAMED_PROPERTY:
1844 EmitNamedPropertyAssignment(expr);
1845 break;
1846 case NAMED_SUPER_PROPERTY:
1847 EmitNamedSuperPropertyStore(property);
1848 context()->Plug(x0);
1849 break;
1850 case KEYED_SUPER_PROPERTY:
1851 EmitKeyedSuperPropertyStore(property);
1852 context()->Plug(x0);
1853 break;
1854 case KEYED_PROPERTY:
1855 EmitKeyedPropertyAssignment(expr);
1856 break;
1857 }
1858}
1859
1860
1861void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1862 SetExpressionPosition(prop);
1863 Literal* key = prop->key()->AsLiteral();
1864 DCHECK(!prop->IsSuperAccess());
1865
1866 __ Mov(LoadDescriptor::NameRegister(), Operand(key->value()));
1867 __ Mov(LoadDescriptor::SlotRegister(),
1868 SmiFromSlot(prop->PropertyFeedbackSlot()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001869 CallLoadIC(NOT_INSIDE_TYPEOF);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001870}
1871
1872
1873void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1874 Token::Value op,
1875 Expression* left_expr,
1876 Expression* right_expr) {
1877 Label done, both_smis, stub_call;
1878
1879 // Get the arguments.
1880 Register left = x1;
1881 Register right = x0;
1882 Register result = x0;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001883 PopOperand(left);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001884
1885 // Perform combined smi check on both operands.
1886 __ Orr(x10, left, right);
1887 JumpPatchSite patch_site(masm_);
1888 patch_site.EmitJumpIfSmi(x10, &both_smis);
1889
1890 __ Bind(&stub_call);
1891
Ben Murdoch097c5b22016-05-18 11:27:45 +01001892 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001893 {
1894 Assembler::BlockPoolsScope scope(masm_);
1895 CallIC(code, expr->BinaryOperationFeedbackId());
1896 patch_site.EmitPatchInfo();
1897 }
1898 __ B(&done);
1899
1900 __ Bind(&both_smis);
1901 // Smi case. This code works in the same way as the smi-smi case in the type
1902 // recording binary operation stub, see
1903 // BinaryOpStub::GenerateSmiSmiOperation for comments.
1904 // TODO(all): That doesn't exist any more. Where are the comments?
1905 //
1906 // The set of operations that needs to be supported here is controlled by
1907 // FullCodeGenerator::ShouldInlineSmiCase().
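// On arm64 a smi keeps its 32-bit payload in the upper half of the register
// (see kSmiShift), so the shift cases below first extract the low five bits
// of the untagged shift count with Ubfx and then clear the low kSmiShift bits
// of the result again where necessary to keep it a valid smi.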
1908 switch (op) {
1909 case Token::SAR:
1910 __ Ubfx(right, right, kSmiShift, 5);
1911 __ Asr(result, left, right);
1912 __ Bic(result, result, kSmiShiftMask);
1913 break;
1914 case Token::SHL:
1915 __ Ubfx(right, right, kSmiShift, 5);
1916 __ Lsl(result, left, right);
1917 break;
1918 case Token::SHR:
1919 // If `left >>> right` >= 0x80000000, the result is not representable in a
1920 // signed 32-bit smi.
1921 __ Ubfx(right, right, kSmiShift, 5);
1922 __ Lsr(x10, left, right);
1923 __ Tbnz(x10, kXSignBit, &stub_call);
1924 __ Bic(result, x10, kSmiShiftMask);
1925 break;
1926 case Token::ADD:
1927 __ Adds(x10, left, right);
1928 __ B(vs, &stub_call);
1929 __ Mov(result, x10);
1930 break;
1931 case Token::SUB:
1932 __ Subs(x10, left, right);
1933 __ B(vs, &stub_call);
1934 __ Mov(result, x10);
1935 break;
1936 case Token::MUL: {
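// Smulh yields the product of the two untagged payloads: each tagged operand
// is its payload shifted up by kSmiShift, so the high 64 bits of the 128-bit
// product are exactly payload1 * payload2. The Eor/Tbnz pair bails out to the
// stub when the mathematical result would be -0, and the Cls check bails out
// when the product no longer fits in a 32-bit smi.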
1937 Label not_minus_zero, done;
1938 STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
1939 STATIC_ASSERT(kSmiTag == 0);
1940 __ Smulh(x10, left, right);
1941 __ Cbnz(x10, &not_minus_zero);
1942 __ Eor(x11, left, right);
1943 __ Tbnz(x11, kXSignBit, &stub_call);
1944 __ Mov(result, x10);
1945 __ B(&done);
1946 __ Bind(&not_minus_zero);
1947 __ Cls(x11, x10);
1948 __ Cmp(x11, kXRegSizeInBits - kSmiShift);
1949 __ B(lt, &stub_call);
1950 __ SmiTag(result, x10);
1951 __ Bind(&done);
1952 break;
1953 }
1954 case Token::BIT_OR:
1955 __ Orr(result, left, right);
1956 break;
1957 case Token::BIT_AND:
1958 __ And(result, left, right);
1959 break;
1960 case Token::BIT_XOR:
1961 __ Eor(result, left, right);
1962 break;
1963 default:
1964 UNREACHABLE();
1965 }
1966
1967 __ Bind(&done);
1968 context()->Plug(x0);
1969}
1970
1971
1972void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001973 PopOperand(x1);
1974 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001975 JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
1976 {
1977 Assembler::BlockPoolsScope scope(masm_);
1978 CallIC(code, expr->BinaryOperationFeedbackId());
1979 patch_site.EmitPatchInfo();
1980 }
1981 context()->Plug(x0);
1982}
1983
1984
1985void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001986 for (int i = 0; i < lit->properties()->length(); i++) {
1987 ObjectLiteral::Property* property = lit->properties()->at(i);
1988 Expression* value = property->value();
1989
Ben Murdoch097c5b22016-05-18 11:27:45 +01001990 Register scratch = x1;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001991 if (property->is_static()) {
1992 __ Peek(scratch, kPointerSize); // constructor
1993 } else {
1994 __ Peek(scratch, 0); // prototype
1995 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001996 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001997 EmitPropertyKey(property, lit->GetIdForProperty(i));
1998
1999 // The static prototype property is read-only. We handle the non-computed
2000 // property name case in the parser. Since this is the only case where we
2001 // need to check for an own read-only property, we special-case it here so
2002 // that we do not have to perform the check for every property.
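// For example, class C { static ['prototype']() {} } must fail here with a
// TypeError, because only the computed-name form can slip past the parser
// check.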
2003 if (property->is_static() && property->is_computed_name()) {
2004 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2005 __ Push(x0);
2006 }
2007
2008 VisitForStackValue(value);
2009 if (NeedsHomeObject(value)) {
2010 EmitSetHomeObject(value, 2, property->GetSlot());
2011 }
2012
2013 switch (property->kind()) {
2014 case ObjectLiteral::Property::CONSTANT:
2015 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2016 case ObjectLiteral::Property::PROTOTYPE:
2017 UNREACHABLE();
2018 case ObjectLiteral::Property::COMPUTED:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002019 PushOperand(Smi::FromInt(DONT_ENUM));
2020 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
2021 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002022 break;
2023
2024 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002025 PushOperand(Smi::FromInt(DONT_ENUM));
2026 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002027 break;
2028
2029 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01002030 PushOperand(Smi::FromInt(DONT_ENUM));
2031 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002032 break;
2033
2034 default:
2035 UNREACHABLE();
2036 }
2037 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002038}
2039
2040
2041void FullCodeGenerator::EmitAssignment(Expression* expr,
2042 FeedbackVectorSlot slot) {
2043 DCHECK(expr->IsValidReferenceExpressionOrThis());
2044
2045 Property* prop = expr->AsProperty();
2046 LhsKind assign_type = Property::GetAssignType(prop);
2047
2048 switch (assign_type) {
2049 case VARIABLE: {
2050 Variable* var = expr->AsVariableProxy()->var();
2051 EffectContext context(this);
2052 EmitVariableAssignment(var, Token::ASSIGN, slot);
2053 break;
2054 }
2055 case NAMED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002056 PushOperand(x0); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002057 VisitForAccumulatorValue(prop->obj());
2058 // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
2059 // this copy.
2060 __ Mov(StoreDescriptor::ReceiverRegister(), x0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002061 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002062 __ Mov(StoreDescriptor::NameRegister(),
2063 Operand(prop->key()->AsLiteral()->value()));
2064 EmitLoadStoreICSlot(slot);
2065 CallStoreIC();
2066 break;
2067 }
2068 case NAMED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002069 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002070 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2071 VisitForAccumulatorValue(
2072 prop->obj()->AsSuperPropertyReference()->home_object());
2073 // stack: value, this; x0: home_object
2074 Register scratch = x10;
2075 Register scratch2 = x11;
2076 __ mov(scratch, result_register()); // home_object
2077 __ Peek(x0, kPointerSize); // value
2078 __ Peek(scratch2, 0); // this
2079 __ Poke(scratch2, kPointerSize); // this
2080 __ Poke(scratch, 0); // home_object
2081 // stack: this, home_object; x0: value
2082 EmitNamedSuperPropertyStore(prop);
2083 break;
2084 }
2085 case KEYED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002086 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002087 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2088 VisitForStackValue(
2089 prop->obj()->AsSuperPropertyReference()->home_object());
2090 VisitForAccumulatorValue(prop->key());
2091 Register scratch = x10;
2092 Register scratch2 = x11;
2093 __ Peek(scratch2, 2 * kPointerSize); // value
2094 // stack: value, this, home_object; x0: key, x11: value
2095 __ Peek(scratch, kPointerSize); // this
2096 __ Poke(scratch, 2 * kPointerSize);
2097 __ Peek(scratch, 0); // home_object
2098 __ Poke(scratch, kPointerSize);
2099 __ Poke(x0, 0);
2100 __ Move(x0, scratch2);
2101 // stack: this, home_object, key; x0: value.
2102 EmitKeyedSuperPropertyStore(prop);
2103 break;
2104 }
2105 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002106 PushOperand(x0); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002107 VisitForStackValue(prop->obj());
2108 VisitForAccumulatorValue(prop->key());
2109 __ Mov(StoreDescriptor::NameRegister(), x0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002110 PopOperands(StoreDescriptor::ReceiverRegister(),
2111 StoreDescriptor::ValueRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002112 EmitLoadStoreICSlot(slot);
2113 Handle<Code> ic =
2114 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2115 CallIC(ic);
2116 break;
2117 }
2118 }
2119 context()->Plug(x0);
2120}
2121
2122
2123void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2124 Variable* var, MemOperand location) {
2125 __ Str(result_register(), location);
2126 if (var->IsContextSlot()) {
2127 // RecordWrite may destroy all its register arguments.
2128 __ Mov(x10, result_register());
2129 int offset = Context::SlotOffset(var->index());
2130 __ RecordWriteContextSlot(
2131 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
2132 }
2133}
2134
2135
2136void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2137 FeedbackVectorSlot slot) {
2138 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2139 if (var->IsUnallocated()) {
2140 // Global var, const, or let.
2141 __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2142 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2143 EmitLoadStoreICSlot(slot);
2144 CallStoreIC();
2145
2146 } else if (var->mode() == LET && op != Token::INIT) {
2147 // Non-initializing assignment to let variable needs a write barrier.
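// If the slot still holds the hole, the binding has not been initialized yet
// (temporal dead zone), e.g. { x = 1; let x; } throws a ReferenceError.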
2148 DCHECK(!var->IsLookupSlot());
2149 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2150 Label assign;
2151 MemOperand location = VarOperand(var, x1);
2152 __ Ldr(x10, location);
2153 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
2154 __ Mov(x10, Operand(var->name()));
2155 __ Push(x10);
2156 __ CallRuntime(Runtime::kThrowReferenceError);
2157 // Perform the assignment.
2158 __ Bind(&assign);
2159 EmitStoreToStackLocalOrContextSlot(var, location);
2160
2161 } else if (var->mode() == CONST && op != Token::INIT) {
2162 // Assignment to const variable needs a write barrier.
2163 DCHECK(!var->IsLookupSlot());
2164 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2165 Label const_error;
2166 MemOperand location = VarOperand(var, x1);
2167 __ Ldr(x10, location);
2168 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &const_error);
2169 __ Mov(x10, Operand(var->name()));
2170 __ Push(x10);
2171 __ CallRuntime(Runtime::kThrowReferenceError);
2172 __ Bind(&const_error);
2173 __ CallRuntime(Runtime::kThrowConstAssignError);
2174
2175 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2176 // Initializing assignment to const {this} needs a write barrier.
2177 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2178 Label uninitialized_this;
2179 MemOperand location = VarOperand(var, x1);
2180 __ Ldr(x10, location);
2181 __ JumpIfRoot(x10, Heap::kTheHoleValueRootIndex, &uninitialized_this);
2182 __ Mov(x0, Operand(var->name()));
2183 __ Push(x0);
2184 __ CallRuntime(Runtime::kThrowReferenceError);
2185 __ bind(&uninitialized_this);
2186 EmitStoreToStackLocalOrContextSlot(var, location);
2187
2188 } else if (!var->is_const_mode() ||
2189 (var->mode() == CONST && op == Token::INIT)) {
2190 if (var->IsLookupSlot()) {
2191 // Assignment to var.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002192 __ Push(var->name());
2193 __ Push(x0);
2194 __ CallRuntime(is_strict(language_mode())
2195 ? Runtime::kStoreLookupSlot_Strict
2196 : Runtime::kStoreLookupSlot_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002197 } else {
2198 // Assignment to var or initializing assignment to let/const in harmony
2199 // mode.
2200 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2201 MemOperand location = VarOperand(var, x1);
2202 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
2203 __ Ldr(x10, location);
2204 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
2205 __ Check(eq, kLetBindingReInitialization);
2206 }
2207 EmitStoreToStackLocalOrContextSlot(var, location);
2208 }
2209
2210 } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
2211 // Const initializers need a write barrier.
2212 DCHECK(!var->IsParameter()); // No const parameters.
2213 if (var->IsLookupSlot()) {
2214 __ Mov(x1, Operand(var->name()));
2215 __ Push(x0, cp, x1);
2216 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
2217 } else {
2218 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2219 Label skip;
2220 MemOperand location = VarOperand(var, x1);
2221 __ Ldr(x10, location);
2222 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
2223 EmitStoreToStackLocalOrContextSlot(var, location);
2224 __ Bind(&skip);
2225 }
2226
2227 } else {
2228 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2229 if (is_strict(language_mode())) {
2230 __ CallRuntime(Runtime::kThrowConstAssignError);
2231 }
2232 // Silently ignore store in sloppy mode.
2233 }
2234}
2235
2236
2237void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2238 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2239 // Assignment to a property, using a named store IC.
2240 Property* prop = expr->target()->AsProperty();
2241 DCHECK(prop != NULL);
2242 DCHECK(prop->key()->IsLiteral());
2243
2244 __ Mov(StoreDescriptor::NameRegister(),
2245 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002246 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002247 EmitLoadStoreICSlot(expr->AssignmentSlot());
2248 CallStoreIC();
2249
2250 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2251 context()->Plug(x0);
2252}
2253
2254
2255void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2256 // Assignment to named property of super.
2257 // x0 : value
2258 // stack : receiver ('this'), home_object
2259 DCHECK(prop != NULL);
2260 Literal* key = prop->key()->AsLiteral();
2261 DCHECK(key != NULL);
2262
Ben Murdoch097c5b22016-05-18 11:27:45 +01002263 PushOperand(key->value());
2264 PushOperand(x0);
2265 CallRuntimeWithOperands(is_strict(language_mode())
2266 ? Runtime::kStoreToSuper_Strict
2267 : Runtime::kStoreToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002268}
2269
2270
2271void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2272 // Assignment to keyed property of super.
2273 // x0 : value
2274 // stack : receiver ('this'), home_object, key
2275 DCHECK(prop != NULL);
2276
Ben Murdoch097c5b22016-05-18 11:27:45 +01002277 PushOperand(x0);
2278 CallRuntimeWithOperands(is_strict(language_mode())
2279 ? Runtime::kStoreKeyedToSuper_Strict
2280 : Runtime::kStoreKeyedToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002281}
2282
2283
2284void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2285 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2286 // Assignment to a property, using a keyed store IC.
2287
2288 // TODO(all): Could we pass this in registers rather than on the stack?
Ben Murdoch097c5b22016-05-18 11:27:45 +01002289 PopOperands(StoreDescriptor::NameRegister(),
2290 StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002291 DCHECK(StoreDescriptor::ValueRegister().is(x0));
2292
2293 Handle<Code> ic =
2294 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2295 EmitLoadStoreICSlot(expr->AssignmentSlot());
2296 CallIC(ic);
2297
2298 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2299 context()->Plug(x0);
2300}
2301
2302
2303void FullCodeGenerator::VisitProperty(Property* expr) {
2304 Comment cmnt(masm_, "[ Property");
2305 SetExpressionPosition(expr);
2306 Expression* key = expr->key();
2307
2308 if (key->IsPropertyName()) {
2309 if (!expr->IsSuperAccess()) {
2310 VisitForAccumulatorValue(expr->obj());
2311 __ Move(LoadDescriptor::ReceiverRegister(), x0);
2312 EmitNamedPropertyLoad(expr);
2313 } else {
2314 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2315 VisitForStackValue(
2316 expr->obj()->AsSuperPropertyReference()->home_object());
2317 EmitNamedSuperPropertyLoad(expr);
2318 }
2319 } else {
2320 if (!expr->IsSuperAccess()) {
2321 VisitForStackValue(expr->obj());
2322 VisitForAccumulatorValue(expr->key());
2323 __ Move(LoadDescriptor::NameRegister(), x0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002324 PopOperand(LoadDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002325 EmitKeyedPropertyLoad(expr);
2326 } else {
2327 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2328 VisitForStackValue(
2329 expr->obj()->AsSuperPropertyReference()->home_object());
2330 VisitForStackValue(expr->key());
2331 EmitKeyedSuperPropertyLoad(expr);
2332 }
2333 }
2334 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2335 context()->Plug(x0);
2336}
2337
2338
2339void FullCodeGenerator::CallIC(Handle<Code> code,
2340 TypeFeedbackId ast_id) {
2341 ic_total_count_++;
2342 // All calls must have a predictable size in full-codegen code to ensure that
2343 // the debugger can patch them correctly.
2344 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2345}
2346
2347
2348// Code common for calls using the IC.
2349void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2350 ASM_LOCATION("FullCodeGenerator::EmitCallWithLoadIC");
2351 Expression* callee = expr->expression();
2352
2353 // Get the target function.
2354 ConvertReceiverMode convert_mode;
2355 if (callee->IsVariableProxy()) {
2356 { StackValueContext context(this);
2357 EmitVariableLoad(callee->AsVariableProxy());
2358 PrepareForBailout(callee, NO_REGISTERS);
2359 }
2360 // Push undefined as receiver. This is patched in the method prologue if it
2361 // is a sloppy mode method.
2362 {
2363 UseScratchRegisterScope temps(masm_);
2364 Register temp = temps.AcquireX();
2365 __ LoadRoot(temp, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002366 PushOperand(temp);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002367 }
2368 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2369 } else {
2370 // Load the function from the receiver.
2371 DCHECK(callee->IsProperty());
2372 DCHECK(!callee->AsProperty()->IsSuperAccess());
2373 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2374 EmitNamedPropertyLoad(callee->AsProperty());
2375 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2376 // Push the target function under the receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002377 PopOperand(x10);
2378 PushOperands(x0, x10);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002379 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2380 }
2381
2382 EmitCall(expr, convert_mode);
2383}
2384
2385
2386void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2387 ASM_LOCATION("FullCodeGenerator::EmitSuperCallWithLoadIC");
2388 Expression* callee = expr->expression();
2389 DCHECK(callee->IsProperty());
2390 Property* prop = callee->AsProperty();
2391 DCHECK(prop->IsSuperAccess());
2392 SetExpressionPosition(prop);
2393
2394 Literal* key = prop->key()->AsLiteral();
2395 DCHECK(!key->value()->IsSmi());
2396
2397 // Load the function from the receiver.
2398 const Register scratch = x10;
2399 SuperPropertyReference* super_ref =
2400 callee->AsProperty()->obj()->AsSuperPropertyReference();
2401 VisitForStackValue(super_ref->home_object());
2402 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002403 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002404 __ Peek(scratch, kPointerSize);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002405 PushOperands(x0, scratch);
2406 PushOperand(key->value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002407
2408 // Stack here:
2409 // - home_object
2410 // - this (receiver)
2411 // - this (receiver) <-- LoadFromSuper will pop here and below.
2412 // - home_object
Ben Murdoch097c5b22016-05-18 11:27:45 +01002413 // - key
2414 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002415
2416 // Replace home_object with target function.
2417 __ Poke(x0, kPointerSize);
2418
2419 // Stack here:
2420 // - target function
2421 // - this (receiver)
2422 EmitCall(expr);
2423}
2424
2425
2426// Code common for calls using the IC.
2427void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2428 Expression* key) {
2429 ASM_LOCATION("FullCodeGenerator::EmitKeyedCallWithLoadIC");
2430 // Load the key.
2431 VisitForAccumulatorValue(key);
2432
2433 Expression* callee = expr->expression();
2434
2435 // Load the function from the receiver.
2436 DCHECK(callee->IsProperty());
2437 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2438 __ Move(LoadDescriptor::NameRegister(), x0);
2439 EmitKeyedPropertyLoad(callee->AsProperty());
2440 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2441
2442 // Push the target function under the receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002443 PopOperand(x10);
2444 PushOperands(x0, x10);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002445
2446 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2447}
2448
2449
2450void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2451 ASM_LOCATION("FullCodeGenerator::EmitKeyedSuperCallWithLoadIC");
2452 Expression* callee = expr->expression();
2453 DCHECK(callee->IsProperty());
2454 Property* prop = callee->AsProperty();
2455 DCHECK(prop->IsSuperAccess());
2456 SetExpressionPosition(prop);
2457
2458 // Load the function from the receiver.
2459 const Register scratch = x10;
2460 SuperPropertyReference* super_ref =
2461 callee->AsProperty()->obj()->AsSuperPropertyReference();
2462 VisitForStackValue(super_ref->home_object());
2463 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002464 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002465 __ Peek(scratch, kPointerSize);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002466 PushOperands(x0, scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002467 VisitForStackValue(prop->key());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002468
2469 // Stack here:
2470 // - home_object
2471 // - this (receiver)
2472 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2473 // - home_object
2474 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002475 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002476
2477 // Replace home_object with target function.
2478 __ Poke(x0, kPointerSize);
2479
2480 // Stack here:
2481 // - target function
2482 // - this (receiver)
2483 EmitCall(expr);
2484}
2485
2486
2487void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2488 ASM_LOCATION("FullCodeGenerator::EmitCall");
2489 // Load the arguments.
2490 ZoneList<Expression*>* args = expr->arguments();
2491 int arg_count = args->length();
2492 for (int i = 0; i < arg_count; i++) {
2493 VisitForStackValue(args->at(i));
2494 }
2495
2496 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
2497 SetCallPosition(expr);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002498 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2499 if (FLAG_trace) {
2500 __ CallRuntime(Runtime::kTraceTailCall);
2501 }
2502 // Update profiling counters before the tail call since we will
2503 // not return to this function.
2504 EmitProfilingCounterHandlingForReturnSequence(true);
2505 }
2506 Handle<Code> ic =
2507 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2508 .code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002509 __ Mov(x3, SmiFromSlot(expr->CallFeedbackICSlot()));
2510 __ Peek(x1, (arg_count + 1) * kXRegSize);
2511 // Don't assign a type feedback id to the IC, since type feedback is provided
2512 // by the vector above.
2513 CallIC(ic);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002514 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002515
2516 RecordJSReturnSite(expr);
2517 // Restore context register.
2518 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2519 context()->DropAndPlug(1, x0);
2520}
2521
2522
2523void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2524 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
2525 // Prepare to push a copy of the first argument or undefined if it doesn't
2526 // exist.
2527 if (arg_count > 0) {
2528 __ Peek(x9, arg_count * kXRegSize);
2529 } else {
2530 __ LoadRoot(x9, Heap::kUndefinedValueRootIndex);
2531 }
2532
2533 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2534
2535 // Prepare to push the language mode.
2536 __ Mov(x11, Smi::FromInt(language_mode()));
2537 // Prepare to push the start position of the scope the call resides in.
2538 __ Mov(x12, Smi::FromInt(scope()->start_position()));
2539
2540 // Push.
2541 __ Push(x9, x10, x11, x12);
2542
2543 // Do the runtime call.
2544 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2545}
2546
2547
2548// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2549void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2550 VariableProxy* callee = expr->expression()->AsVariableProxy();
2551 if (callee->var()->IsLookupSlot()) {
2552 Label slow, done;
2553 SetExpressionPosition(callee);
2554 // Generate code for loading from variables potentially shadowed
2555 // by eval-introduced variables.
2556 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2557
2558 __ Bind(&slow);
2559 // Call the runtime to find the function to call (returned in x0)
2560 // and the object holding it (returned in x1).
Ben Murdoch097c5b22016-05-18 11:27:45 +01002561 __ Push(callee->name());
2562 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2563 PushOperands(x0, x1); // Receiver, function.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002564 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2565
2566 // If fast case code has been generated, emit code to push the
2567 // function and receiver and have the slow path jump around this
2568 // code.
2569 if (done.is_linked()) {
2570 Label call;
2571 __ B(&call);
2572 __ Bind(&done);
2573 // Push function.
2574 // The receiver is implicitly the global receiver. Indicate this
2575 // by passing undefined to the call function stub.
2576 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2577 __ Push(x0, x1);
2578 __ Bind(&call);
2579 }
2580 } else {
2581 VisitForStackValue(callee);
2582 // refEnv.WithBaseObject()
2583 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002584 PushOperand(x10); // Reserved receiver slot.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002585 }
2586}
2587
2588
2589void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2590 ASM_LOCATION("FullCodeGenerator::EmitPossiblyEvalCall");
2591 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
2592 // to resolve the function we need to call. Then we call the resolved
2593 // function using the given arguments.
2594 ZoneList<Expression*>* args = expr->arguments();
2595 int arg_count = args->length();
2596
2597 PushCalleeAndWithBaseObject(expr);
2598
2599 // Push the arguments.
2600 for (int i = 0; i < arg_count; i++) {
2601 VisitForStackValue(args->at(i));
2602 }
2603
2604 // Push a copy of the function (found below the arguments) and
2605 // resolve eval.
2606 __ Peek(x10, (arg_count + 1) * kPointerSize);
2607 __ Push(x10);
2608 EmitResolvePossiblyDirectEval(arg_count);
2609
2610 // Touch up the stack with the resolved function.
2611 __ Poke(x0, (arg_count + 1) * kPointerSize);
2612
2613 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
2614
2615 // Record source position for debugger.
2616 SetCallPosition(expr);
2617
2618 // Call the evaluated function.
2619 __ Peek(x1, (arg_count + 1) * kXRegSize);
2620 __ Mov(x0, arg_count);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002621 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2622 expr->tail_call_mode()),
2623 RelocInfo::CODE_TARGET);
2624 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002625 RecordJSReturnSite(expr);
2626 // Restore context register.
2627 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2628 context()->DropAndPlug(1, x0);
2629}
2630
2631
2632void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2633 Comment cmnt(masm_, "[ CallNew");
2634 // According to ECMA-262, section 11.2.2, page 44, the function
2635 // expression in new calls must be evaluated before the
2636 // arguments.
2637
2638 // Push constructor on the stack. If it's not a function it's used as
2639 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2640 // ignored.
2641 DCHECK(!expr->expression()->IsSuperPropertyReference());
2642 VisitForStackValue(expr->expression());
2643
2644 // Push the arguments ("left-to-right") on the stack.
2645 ZoneList<Expression*>* args = expr->arguments();
2646 int arg_count = args->length();
2647 for (int i = 0; i < arg_count; i++) {
2648 VisitForStackValue(args->at(i));
2649 }
2650
2651 // Call the construct call builtin that handles allocation and
2652 // constructor invocation.
2653 SetConstructCallPosition(expr);
2654
2655 // Load function and argument count into x1 and x0.
2656 __ Mov(x0, arg_count);
2657 __ Peek(x1, arg_count * kXRegSize);
2658
2659 // Record call targets in unoptimized code.
2660 __ EmitLoadTypeFeedbackVector(x2);
2661 __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));
2662
2663 CallConstructStub stub(isolate());
2664 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002665 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002666 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2667 // Restore context register.
2668 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2669 context()->Plug(x0);
2670}
2671
2672
2673void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2674 ASM_LOCATION("FullCodeGenerator::EmitSuperConstructorCall");
2675 SuperCallReference* super_call_ref =
2676 expr->expression()->AsSuperCallReference();
2677 DCHECK_NOT_NULL(super_call_ref);
2678
2679 // Push the super constructor target on the stack (may be null,
2680 // but the Construct builtin can deal with that properly).
2681 VisitForAccumulatorValue(super_call_ref->this_function_var());
2682 __ AssertFunction(result_register());
2683 __ Ldr(result_register(),
2684 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2685 __ Ldr(result_register(),
2686 FieldMemOperand(result_register(), Map::kPrototypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002687 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002688
2689 // Push the arguments ("left-to-right") on the stack.
2690 ZoneList<Expression*>* args = expr->arguments();
2691 int arg_count = args->length();
2692 for (int i = 0; i < arg_count; i++) {
2693 VisitForStackValue(args->at(i));
2694 }
2695
2696 // Call the construct call builtin that handles allocation and
2697 // constructor invocation.
2698 SetConstructCallPosition(expr);
2699
2700 // Load new target into x3.
2701 VisitForAccumulatorValue(super_call_ref->new_target_var());
2702 __ Mov(x3, result_register());
2703
2704 // Load function and argument count into x1 and x0.
2705 __ Mov(x0, arg_count);
2706 __ Peek(x1, arg_count * kXRegSize);
2707
2708 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002709 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002710
2711 RecordJSReturnSite(expr);
2712
2713 // Restore context register.
2714 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2715 context()->Plug(x0);
2716}
2717
2718
2719void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2720 ZoneList<Expression*>* args = expr->arguments();
2721 DCHECK(args->length() == 1);
2722
2723 VisitForAccumulatorValue(args->at(0));
2724
2725 Label materialize_true, materialize_false;
2726 Label* if_true = NULL;
2727 Label* if_false = NULL;
2728 Label* fall_through = NULL;
2729 context()->PrepareTest(&materialize_true, &materialize_false,
2730 &if_true, &if_false, &fall_through);
2731
2732 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2733 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
2734
2735 context()->Plug(if_true, if_false);
2736}
2737
2738
2739void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2740 ZoneList<Expression*>* args = expr->arguments();
2741 DCHECK(args->length() == 1);
2742
2743 VisitForAccumulatorValue(args->at(0));
2744
2745 Label materialize_true, materialize_false;
2746 Label* if_true = NULL;
2747 Label* if_false = NULL;
2748 Label* fall_through = NULL;
2749 context()->PrepareTest(&materialize_true, &materialize_false,
2750 &if_true, &if_false, &fall_through);
2751
2752 __ JumpIfSmi(x0, if_false);
2753 __ CompareObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE);
2754 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2755 Split(ge, if_true, if_false, fall_through);
2756
2757 context()->Plug(if_true, if_false);
2758}
2759
2760
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002761void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2762 ZoneList<Expression*>* args = expr->arguments();
2763 DCHECK(args->length() == 1);
2764
2765 VisitForAccumulatorValue(args->at(0));
2766
2767 Label materialize_true, materialize_false;
2768 Label* if_true = NULL;
2769 Label* if_false = NULL;
2770 Label* fall_through = NULL;
2771 context()->PrepareTest(&materialize_true, &materialize_false,
2772 &if_true, &if_false, &fall_through);
2773
2774 __ JumpIfSmi(x0, if_false);
2775 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
2776 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2777 Split(eq, if_true, if_false, fall_through);
2778
2779 context()->Plug(if_true, if_false);
2780}
2781
2782
2783void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2784 ZoneList<Expression*>* args = expr->arguments();
2785 DCHECK(args->length() == 1);
2786
2787 VisitForAccumulatorValue(args->at(0));
2788
2789 Label materialize_true, materialize_false;
2790 Label* if_true = NULL;
2791 Label* if_false = NULL;
2792 Label* fall_through = NULL;
2793 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2794 &if_false, &fall_through);
2795
2796 __ JumpIfSmi(x0, if_false);
2797 __ CompareObjectType(x0, x10, x11, JS_TYPED_ARRAY_TYPE);
2798 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2799 Split(eq, if_true, if_false, fall_through);
2800
2801 context()->Plug(if_true, if_false);
2802}
2803
2804
2805void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2806 ZoneList<Expression*>* args = expr->arguments();
2807 DCHECK(args->length() == 1);
2808
2809 VisitForAccumulatorValue(args->at(0));
2810
2811 Label materialize_true, materialize_false;
2812 Label* if_true = NULL;
2813 Label* if_false = NULL;
2814 Label* fall_through = NULL;
2815 context()->PrepareTest(&materialize_true, &materialize_false,
2816 &if_true, &if_false, &fall_through);
2817
2818 __ JumpIfSmi(x0, if_false);
2819 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
2820 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2821 Split(eq, if_true, if_false, fall_through);
2822
2823 context()->Plug(if_true, if_false);
2824}
2825
2826
2827void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2828 ZoneList<Expression*>* args = expr->arguments();
2829 DCHECK(args->length() == 1);
2830
2831 VisitForAccumulatorValue(args->at(0));
2832
2833 Label materialize_true, materialize_false;
2834 Label* if_true = NULL;
2835 Label* if_false = NULL;
2836 Label* fall_through = NULL;
2837 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2838 &if_false, &fall_through);
2839
2840 __ JumpIfSmi(x0, if_false);
2841 __ CompareObjectType(x0, x10, x11, JS_PROXY_TYPE);
2842 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2843 Split(eq, if_true, if_false, fall_through);
2844
2845 context()->Plug(if_true, if_false);
2846}
2847
2848
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002849void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2850 ASM_LOCATION("FullCodeGenerator::EmitClassOf");
2851 ZoneList<Expression*>* args = expr->arguments();
2852 DCHECK(args->length() == 1);
2853 Label done, null, function, non_function_constructor;
2854
2855 VisitForAccumulatorValue(args->at(0));
2856
2857 // If the object is not a JSReceiver, we return null.
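// So, roughly: the class of [] is "Array", the class of a plain function is
// "Function", and a smi such as 1 or the null value falls through to null.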
2858 __ JumpIfSmi(x0, &null);
2859 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2860 __ CompareObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE);
2861 // x10: object's map.
2862 // x11: object's type.
2863 __ B(lt, &null);
2864
2865 // Return 'Function' for JSFunction objects.
2866 __ Cmp(x11, JS_FUNCTION_TYPE);
2867 __ B(eq, &function);
2868
2869 // Check if the constructor in the map is a JS function.
2870 Register instance_type = x14;
2871 __ GetMapConstructor(x12, x10, x13, instance_type);
2872 __ Cmp(instance_type, JS_FUNCTION_TYPE);
2873 __ B(ne, &non_function_constructor);
2874
2875 // x12 now contains the constructor function. Grab the
2876 // instance class name from there.
2877 __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
2878 __ Ldr(x0,
2879 FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
2880 __ B(&done);
2881
2882 // Functions have class 'Function'.
2883 __ Bind(&function);
2884 __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
2885 __ B(&done);
2886
2887 // Objects with a non-function constructor have class 'Object'.
2888 __ Bind(&non_function_constructor);
2889 __ LoadRoot(x0, Heap::kObject_stringRootIndex);
2890 __ B(&done);
2891
2892 // Non-JS objects have class null.
2893 __ Bind(&null);
2894 __ LoadRoot(x0, Heap::kNullValueRootIndex);
2895
2896 // All done.
2897 __ Bind(&done);
2898
2899 context()->Plug(x0);
2900}
2901
2902
2903void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
2904 ASM_LOCATION("FullCodeGenerator::EmitValueOf");
2905 ZoneList<Expression*>* args = expr->arguments();
2906 DCHECK(args->length() == 1);
2907 VisitForAccumulatorValue(args->at(0)); // Load the object.
2908
2909 Label done;
2910 // If the object is a smi return the object.
2911 __ JumpIfSmi(x0, &done);
2912 // If the object is not a value type, return the object.
2913 __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
2914 __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));
2915
2916 __ Bind(&done);
2917 context()->Plug(x0);
2918}
2919
2920
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002921void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
2922 ZoneList<Expression*>* args = expr->arguments();
2923 DCHECK_EQ(3, args->length());
2924
2925 Register string = x0;
2926 Register index = x1;
2927 Register value = x2;
2928 Register scratch = x10;
2929
2930 VisitForStackValue(args->at(0)); // index
2931 VisitForStackValue(args->at(1)); // value
2932 VisitForAccumulatorValue(args->at(2)); // string
Ben Murdoch097c5b22016-05-18 11:27:45 +01002933 PopOperands(value, index);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002934
2935 if (FLAG_debug_code) {
2936 __ AssertSmi(value, kNonSmiValue);
2937 __ AssertSmi(index, kNonSmiIndex);
2938 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
2939 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
2940 one_byte_seq_type);
2941 }
2942
2943 __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
2944 __ SmiUntag(value);
2945 __ SmiUntag(index);
2946 __ Strb(value, MemOperand(scratch, index));
2947 context()->Plug(string);
2948}
2949
2950
2951void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
2952 ZoneList<Expression*>* args = expr->arguments();
2953 DCHECK_EQ(3, args->length());
2954
2955 Register string = x0;
2956 Register index = x1;
2957 Register value = x2;
2958 Register scratch = x10;
2959
2960 VisitForStackValue(args->at(0)); // index
2961 VisitForStackValue(args->at(1)); // value
2962 VisitForAccumulatorValue(args->at(2)); // string
Ben Murdoch097c5b22016-05-18 11:27:45 +01002963 PopOperands(value, index);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002964
2965 if (FLAG_debug_code) {
2966 __ AssertSmi(value, kNonSmiValue);
2967 __ AssertSmi(index, kNonSmiIndex);
2968 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
2969 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
2970 two_byte_seq_type);
2971 }
2972
2973 __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
2974 __ SmiUntag(value);
2975 __ SmiUntag(index);
2976 __ Strh(value, MemOperand(scratch, index, LSL, 1));
2977 context()->Plug(string);
2978}
2979
2980
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002981void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
2982 ZoneList<Expression*>* args = expr->arguments();
2983 DCHECK_EQ(1, args->length());
2984
2985 // Load the argument into x0 and convert it.
2986 VisitForAccumulatorValue(args->at(0));
2987
2988 // Convert the object to an integer.
2989 Label done_convert;
2990 __ JumpIfSmi(x0, &done_convert);
2991 __ Push(x0);
2992 __ CallRuntime(Runtime::kToInteger);
2993 __ bind(&done_convert);
2994 context()->Plug(x0);
2995}
2996
2997
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002998void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
2999 ZoneList<Expression*>* args = expr->arguments();
3000 DCHECK(args->length() == 1);
3001
3002 VisitForAccumulatorValue(args->at(0));
3003
3004 Label done;
3005 Register code = x0;
3006 Register result = x1;
3007
3008 StringCharFromCodeGenerator generator(code, result);
3009 generator.GenerateFast(masm_);
3010 __ B(&done);
3011
3012 NopRuntimeCallHelper call_helper;
3013 generator.GenerateSlow(masm_, call_helper);
3014
3015 __ Bind(&done);
3016 context()->Plug(result);
3017}
3018
3019
3020void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3021 ZoneList<Expression*>* args = expr->arguments();
3022 DCHECK(args->length() == 2);
3023
3024 VisitForStackValue(args->at(0));
3025 VisitForAccumulatorValue(args->at(1));
3026
3027 Register object = x1;
3028 Register index = x0;
3029 Register result = x3;
3030
Ben Murdoch097c5b22016-05-18 11:27:45 +01003031 PopOperand(object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003032
3033 Label need_conversion;
3034 Label index_out_of_range;
3035 Label done;
3036 StringCharCodeAtGenerator generator(object,
3037 index,
3038 result,
3039 &need_conversion,
3040 &need_conversion,
3041 &index_out_of_range,
3042 STRING_INDEX_IS_NUMBER);
3043 generator.GenerateFast(masm_);
3044 __ B(&done);
3045
3046 __ Bind(&index_out_of_range);
3047 // When the index is out of range, the spec requires us to return NaN.
3048 __ LoadRoot(result, Heap::kNanValueRootIndex);
3049 __ B(&done);
3050
3051 __ Bind(&need_conversion);
3052 // Load the undefined value into the result register, which will
3053 // trigger conversion.
3054 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3055 __ B(&done);
3056
3057 NopRuntimeCallHelper call_helper;
3058 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3059
3060 __ Bind(&done);
3061 context()->Plug(result);
3062}
3063
3064
3065void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3066 ZoneList<Expression*>* args = expr->arguments();
3067 DCHECK(args->length() == 2);
3068
3069 VisitForStackValue(args->at(0));
3070 VisitForAccumulatorValue(args->at(1));
3071
3072 Register object = x1;
3073 Register index = x0;
3074 Register result = x0;
3075
Ben Murdoch097c5b22016-05-18 11:27:45 +01003076 PopOperand(object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003077
3078 Label need_conversion;
3079 Label index_out_of_range;
3080 Label done;
3081 StringCharAtGenerator generator(object,
3082 index,
3083 x3,
3084 result,
3085 &need_conversion,
3086 &need_conversion,
3087 &index_out_of_range,
3088 STRING_INDEX_IS_NUMBER);
3089 generator.GenerateFast(masm_);
3090 __ B(&done);
3091
3092 __ Bind(&index_out_of_range);
3093 // When the index is out of range, the spec requires us to return
3094 // the empty string.
3095 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3096 __ B(&done);
3097
3098 __ Bind(&need_conversion);
3099 // Move smi zero into the result register, which will trigger conversion.
3100 __ Mov(result, Smi::FromInt(0));
3101 __ B(&done);
3102
3103 NopRuntimeCallHelper call_helper;
3104 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3105
3106 __ Bind(&done);
3107 context()->Plug(result);
3108}
3109
3110
3111void FullCodeGenerator::EmitCall(CallRuntime* expr) {
3112 ASM_LOCATION("FullCodeGenerator::EmitCall");
3113 ZoneList<Expression*>* args = expr->arguments();
3114 DCHECK_LE(2, args->length());
3115 // Push target, receiver and arguments onto the stack.
3116 for (Expression* const arg : *args) {
3117 VisitForStackValue(arg);
3118 }
3119 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3120 // Move target to x1.
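  // The pushed values are the target, the receiver and then the actual
  // arguments, so the target sits (argc + 1) slots below the stack top.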
3121 int const argc = args->length() - 2;
3122 __ Peek(x1, (argc + 1) * kXRegSize);
3123 // Call the target.
3124 __ Mov(x0, argc);
3125 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003126 OperandStackDepthDecrement(argc + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003127 // Restore context register.
3128 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3129 // Discard the function left on TOS.
3130 context()->DropAndPlug(1, x0);
3131}
3132
3133
3134void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3135 ZoneList<Expression*>* args = expr->arguments();
3136 VisitForAccumulatorValue(args->at(0));
3137
3138 Label materialize_true, materialize_false;
3139 Label* if_true = NULL;
3140 Label* if_false = NULL;
3141 Label* fall_through = NULL;
3142 context()->PrepareTest(&materialize_true, &materialize_false,
3143 &if_true, &if_false, &fall_through);
3144
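  // The hash field also encodes whether a cached array index is present;
  // the tested bits are zero exactly when it is, hence the split on 'eq'.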
3145 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3146 __ Tst(x10, String::kContainsCachedArrayIndexMask);
3147 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3148 Split(eq, if_true, if_false, fall_through);
3149
3150 context()->Plug(if_true, if_false);
3151}
3152
3153
3154void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3155 ZoneList<Expression*>* args = expr->arguments();
3156 DCHECK(args->length() == 1);
3157 VisitForAccumulatorValue(args->at(0));
3158
3159 __ AssertString(x0);
3160
3161 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3162 __ IndexFromHash(x10, x0);
3163
3164 context()->Plug(x0);
3165}
3166
3167
3168void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
3169 ZoneList<Expression*>* args = expr->arguments();
3170 DCHECK_EQ(1, args->length());
3171 VisitForAccumulatorValue(args->at(0));
3172 __ AssertFunction(x0);
3173 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
3174 __ Ldr(x0, FieldMemOperand(x0, Map::kPrototypeOffset));
3175 context()->Plug(x0);
3176}
3177
3178
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003179void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3180 DCHECK(expr->arguments()->length() == 0);
3181 ExternalReference debug_is_active =
3182 ExternalReference::debug_is_active_address(isolate());
3183 __ Mov(x10, debug_is_active);
3184 __ Ldrb(x0, MemOperand(x10));
3185 __ SmiTag(x0);
3186 context()->Plug(x0);
3187}
3188
3189
3190void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
3191 ZoneList<Expression*>* args = expr->arguments();
3192 DCHECK_EQ(2, args->length());
3193 VisitForStackValue(args->at(0));
3194 VisitForStackValue(args->at(1));
3195
3196 Label runtime, done;
3197
3198 Register result = x0;
3199 __ Allocate(JSIteratorResult::kSize, result, x10, x11, &runtime, TAG_OBJECT);
3200 Register map_reg = x1;
3201 Register result_value = x2;
3202 Register boolean_done = x3;
3203 Register empty_fixed_array = x4;
3204 Register untagged_result = x5;
3205 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, map_reg);
3206 __ Pop(boolean_done);
3207 __ Pop(result_value);
3208 __ LoadRoot(empty_fixed_array, Heap::kEmptyFixedArrayRootIndex);
3209 STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
3210 JSObject::kElementsOffset);
3211 STATIC_ASSERT(JSIteratorResult::kValueOffset + kPointerSize ==
3212 JSIteratorResult::kDoneOffset);
3213 __ ObjectUntag(untagged_result, result);
3214 __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
3215 __ Stp(empty_fixed_array, empty_fixed_array,
3216 MemOperand(untagged_result, JSObject::kPropertiesOffset));
3217 __ Stp(result_value, boolean_done,
3218 MemOperand(untagged_result, JSIteratorResult::kValueOffset));
3219 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
3220 __ B(&done);
3221
3222 __ Bind(&runtime);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003223 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003224
3225 __ Bind(&done);
3226 context()->Plug(x0);
3227}
3228
3229
3230void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
3231 // Push undefined as the receiver.
3232 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003233 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003234
3235 __ LoadNativeContextSlot(expr->context_index(), x0);
3236}
3237
3238
3239void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
3240 ZoneList<Expression*>* args = expr->arguments();
3241 int arg_count = args->length();
3242
3243 SetCallPosition(expr);
3244 __ Peek(x1, (arg_count + 1) * kPointerSize);
3245 __ Mov(x0, arg_count);
3246 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
3247 RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003248 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003249}
3250
3251
3252void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3253 ZoneList<Expression*>* args = expr->arguments();
3254 int arg_count = args->length();
3255
3256 if (expr->is_jsruntime()) {
3257    Comment cmnt(masm_, "[ CallRuntime");
3258 EmitLoadJSRuntimeFunction(expr);
3259
3260 // Push the target function under the receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003261 PopOperand(x10);
3262 PushOperands(x0, x10);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003263
3264 for (int i = 0; i < arg_count; i++) {
3265 VisitForStackValue(args->at(i));
3266 }
3267
3268 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3269 EmitCallJSRuntimeFunction(expr);
3270
3271 // Restore context register.
3272 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3273
3274 context()->DropAndPlug(1, x0);
3275
3276 } else {
3277 const Runtime::Function* function = expr->function();
3278 switch (function->function_id) {
3279#define CALL_INTRINSIC_GENERATOR(Name) \
3280 case Runtime::kInline##Name: { \
3281 Comment cmnt(masm_, "[ Inline" #Name); \
3282 return Emit##Name(expr); \
3283 }
3284 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
3285#undef CALL_INTRINSIC_GENERATOR
3286 default: {
3287 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
3288 // Push the arguments ("left-to-right").
3289 for (int i = 0; i < arg_count; i++) {
3290 VisitForStackValue(args->at(i));
3291 }
3292
3293 // Call the C runtime function.
3294 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3295 __ CallRuntime(expr->function(), arg_count);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003296 OperandStackDepthDecrement(arg_count);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003297 context()->Plug(x0);
3298 }
3299 }
3300 }
3301}
3302
3303
3304void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3305 switch (expr->op()) {
3306 case Token::DELETE: {
3307 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3308 Property* property = expr->expression()->AsProperty();
3309 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3310
3311 if (property != NULL) {
3312 VisitForStackValue(property->obj());
3313 VisitForStackValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003314 CallRuntimeWithOperands(is_strict(language_mode())
3315 ? Runtime::kDeleteProperty_Strict
3316 : Runtime::kDeleteProperty_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003317 context()->Plug(x0);
3318 } else if (proxy != NULL) {
3319 Variable* var = proxy->var();
3320 // Delete of an unqualified identifier is disallowed in strict mode but
3321 // "delete this" is allowed.
3322 bool is_this = var->HasThisName(isolate());
3323 DCHECK(is_sloppy(language_mode()) || is_this);
3324 if (var->IsUnallocatedOrGlobalSlot()) {
3325 __ LoadGlobalObject(x12);
3326 __ Mov(x11, Operand(var->name()));
3327 __ Push(x12, x11);
3328 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
3329 context()->Plug(x0);
3330 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3331 // Result of deleting non-global, non-dynamic variables is false.
3332 // The subexpression does not have side effects.
3333 context()->Plug(is_this);
3334 } else {
3335 // Non-global variable. Call the runtime to try to delete from the
3336 // context where the variable was introduced.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003337 __ Push(var->name());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003338 __ CallRuntime(Runtime::kDeleteLookupSlot);
3339 context()->Plug(x0);
3340 }
3341 } else {
3342 // Result of deleting non-property, non-variable reference is true.
3343 // The subexpression may have side effects.
3344 VisitForEffect(expr->expression());
3345 context()->Plug(true);
3346 }
3347      break;
3349 }
3350 case Token::VOID: {
3351 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3352 VisitForEffect(expr->expression());
3353 context()->Plug(Heap::kUndefinedValueRootIndex);
3354 break;
3355 }
3356 case Token::NOT: {
3357 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3358 if (context()->IsEffect()) {
3359 // Unary NOT has no side effects so it's only necessary to visit the
3360 // subexpression. Match the optimizing compiler by not branching.
3361 VisitForEffect(expr->expression());
3362 } else if (context()->IsTest()) {
3363 const TestContext* test = TestContext::cast(context());
3364 // The labels are swapped for the recursive call.
3365 VisitForControl(expr->expression(),
3366 test->false_label(),
3367 test->true_label(),
3368 test->fall_through());
3369 context()->Plug(test->true_label(), test->false_label());
3370 } else {
3371 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3372 // TODO(jbramley): This could be much more efficient using (for
3373 // example) the CSEL instruction.
3374 Label materialize_true, materialize_false, done;
3375 VisitForControl(expr->expression(),
3376 &materialize_false,
3377 &materialize_true,
3378 &materialize_true);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003379 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003380
3381 __ Bind(&materialize_true);
3382 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3383 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
3384 __ B(&done);
3385
3386 __ Bind(&materialize_false);
3387 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3388 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
3389 __ B(&done);
3390
3391 __ Bind(&done);
3392 if (context()->IsStackValue()) {
3393 __ Push(result_register());
3394 }
3395 }
3396 break;
3397 }
3398 case Token::TYPEOF: {
3399 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3400 {
3401 AccumulatorValueContext context(this);
3402 VisitForTypeofValue(expr->expression());
3403 }
3404 __ Mov(x3, x0);
3405 TypeofStub typeof_stub(isolate());
3406 __ CallStub(&typeof_stub);
3407 context()->Plug(x0);
3408 break;
3409 }
3410 default:
3411 UNREACHABLE();
3412 }
3413}
3414
3415
3416void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3417 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3418
3419 Comment cmnt(masm_, "[ CountOperation");
3420
3421 Property* prop = expr->expression()->AsProperty();
3422 LhsKind assign_type = Property::GetAssignType(prop);
3423
3424 // Evaluate expression and get value.
3425 if (assign_type == VARIABLE) {
3426 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3427 AccumulatorValueContext context(this);
3428 EmitVariableLoad(expr->expression()->AsVariableProxy());
3429 } else {
3430 // Reserve space for result of postfix operation.
3431 if (expr->is_postfix() && !context()->IsEffect()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003432 PushOperand(xzr);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003433 }
3434 switch (assign_type) {
3435 case NAMED_PROPERTY: {
3436 // Put the object both on the stack and in the register.
3437 VisitForStackValue(prop->obj());
3438 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
3439 EmitNamedPropertyLoad(prop);
3440 break;
3441 }
3442
3443 case NAMED_SUPER_PROPERTY: {
3444 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3445 VisitForAccumulatorValue(
3446 prop->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003447 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003448 const Register scratch = x10;
3449 __ Peek(scratch, kPointerSize);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003450 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003451 EmitNamedSuperPropertyLoad(prop);
3452 break;
3453 }
3454
3455 case KEYED_SUPER_PROPERTY: {
3456 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3457 VisitForStackValue(
3458 prop->obj()->AsSuperPropertyReference()->home_object());
3459 VisitForAccumulatorValue(prop->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003460 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003461 const Register scratch1 = x10;
3462 const Register scratch2 = x11;
3463 __ Peek(scratch1, 2 * kPointerSize);
3464 __ Peek(scratch2, kPointerSize);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003465 PushOperands(scratch1, scratch2, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003466 EmitKeyedSuperPropertyLoad(prop);
3467 break;
3468 }
3469
3470 case KEYED_PROPERTY: {
3471 VisitForStackValue(prop->obj());
3472 VisitForStackValue(prop->key());
3473 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
3474 __ Peek(LoadDescriptor::NameRegister(), 0);
3475 EmitKeyedPropertyLoad(prop);
3476 break;
3477 }
3478
3479 case VARIABLE:
3480 UNREACHABLE();
3481 }
3482 }
3483
3484 // We need a second deoptimization point after loading the value
3485  // in case evaluating the property load may have a side effect.
3486 if (assign_type == VARIABLE) {
3487 PrepareForBailout(expr->expression(), TOS_REG);
3488 } else {
3489 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
3490 }
3491
3492 // Inline smi case if we are in a loop.
3493 Label stub_call, done;
3494 JumpPatchSite patch_site(masm_);
3495
3496 int count_value = expr->op() == Token::INC ? 1 : -1;
3497 if (ShouldInlineSmiCase(expr->op())) {
3498 Label slow;
3499 patch_site.EmitJumpIfNotSmi(x0, &slow);
3500
3501 // Save result for postfix expressions.
3502 if (expr->is_postfix()) {
3503 if (!context()->IsEffect()) {
3504 // Save the result on the stack. If we have a named or keyed property we
3505 // store the result under the receiver that is currently on top of the
3506 // stack.
3507 switch (assign_type) {
3508 case VARIABLE:
3509 __ Push(x0);
3510 break;
3511 case NAMED_PROPERTY:
3512 __ Poke(x0, kPointerSize);
3513 break;
3514 case NAMED_SUPER_PROPERTY:
3515 __ Poke(x0, kPointerSize * 2);
3516 break;
3517 case KEYED_PROPERTY:
3518 __ Poke(x0, kPointerSize * 2);
3519 break;
3520 case KEYED_SUPER_PROPERTY:
3521 __ Poke(x0, kPointerSize * 3);
3522 break;
3523 }
3524 }
3525 }
3526
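    // Fast path: perform the count operation directly on the Smi. 'Adds' sets
    // the flags, so on overflow we undo the addition and fall through to the
    // BinaryOpIC stub call below.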
3527 __ Adds(x0, x0, Smi::FromInt(count_value));
3528 __ B(vc, &done);
3529 // Call stub. Undo operation first.
3530 __ Sub(x0, x0, Smi::FromInt(count_value));
3531 __ B(&stub_call);
3532 __ Bind(&slow);
3533 }
3534 if (!is_strong(language_mode())) {
3535 ToNumberStub convert_stub(isolate());
3536 __ CallStub(&convert_stub);
3537 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
3538 }
3539
3540 // Save result for postfix expressions.
3541 if (expr->is_postfix()) {
3542 if (!context()->IsEffect()) {
3543 // Save the result on the stack. If we have a named or keyed property
3544 // we store the result under the receiver that is currently on top
3545 // of the stack.
3546 switch (assign_type) {
3547 case VARIABLE:
Ben Murdoch097c5b22016-05-18 11:27:45 +01003548 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003549 break;
3550 case NAMED_PROPERTY:
3551 __ Poke(x0, kXRegSize);
3552 break;
3553 case NAMED_SUPER_PROPERTY:
3554 __ Poke(x0, 2 * kXRegSize);
3555 break;
3556 case KEYED_PROPERTY:
3557 __ Poke(x0, 2 * kXRegSize);
3558 break;
3559 case KEYED_SUPER_PROPERTY:
3560 __ Poke(x0, 3 * kXRegSize);
3561 break;
3562 }
3563 }
3564 }
3565
3566 __ Bind(&stub_call);
3567 __ Mov(x1, x0);
3568 __ Mov(x0, Smi::FromInt(count_value));
3569
3570 SetExpressionPosition(expr);
3571
3572 {
3573 Assembler::BlockPoolsScope scope(masm_);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003574 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003575 CallIC(code, expr->CountBinOpFeedbackId());
3576 patch_site.EmitPatchInfo();
3577 }
3578 __ Bind(&done);
3579
3580 if (is_strong(language_mode())) {
3581 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
3582 }
3583 // Store the value returned in x0.
3584 switch (assign_type) {
3585 case VARIABLE:
3586 if (expr->is_postfix()) {
3587 { EffectContext context(this);
3588 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3589 Token::ASSIGN, expr->CountSlot());
3590 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3591 context.Plug(x0);
3592 }
3593        // For all contexts except EffectContext we have the result on
3594 // top of the stack.
3595 if (!context()->IsEffect()) {
3596 context()->PlugTOS();
3597 }
3598 } else {
3599 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3600 Token::ASSIGN, expr->CountSlot());
3601 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3602 context()->Plug(x0);
3603 }
3604 break;
3605 case NAMED_PROPERTY: {
3606 __ Mov(StoreDescriptor::NameRegister(),
3607 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01003608 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003609 EmitLoadStoreICSlot(expr->CountSlot());
3610 CallStoreIC();
3611 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3612 if (expr->is_postfix()) {
3613 if (!context()->IsEffect()) {
3614 context()->PlugTOS();
3615 }
3616 } else {
3617 context()->Plug(x0);
3618 }
3619 break;
3620 }
3621 case NAMED_SUPER_PROPERTY: {
3622 EmitNamedSuperPropertyStore(prop);
3623 if (expr->is_postfix()) {
3624 if (!context()->IsEffect()) {
3625 context()->PlugTOS();
3626 }
3627 } else {
3628 context()->Plug(x0);
3629 }
3630 break;
3631 }
3632 case KEYED_SUPER_PROPERTY: {
3633 EmitKeyedSuperPropertyStore(prop);
3634 if (expr->is_postfix()) {
3635 if (!context()->IsEffect()) {
3636 context()->PlugTOS();
3637 }
3638 } else {
3639 context()->Plug(x0);
3640 }
3641 break;
3642 }
3643 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01003644 PopOperand(StoreDescriptor::NameRegister());
3645 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003646 Handle<Code> ic =
3647 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3648 EmitLoadStoreICSlot(expr->CountSlot());
3649 CallIC(ic);
3650 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3651 if (expr->is_postfix()) {
3652 if (!context()->IsEffect()) {
3653 context()->PlugTOS();
3654 }
3655 } else {
3656 context()->Plug(x0);
3657 }
3658 break;
3659 }
3660 }
3661}
3662
3663
3664void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3665 Expression* sub_expr,
3666 Handle<String> check) {
3667 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
3668 Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
3669 Label materialize_true, materialize_false;
3670 Label* if_true = NULL;
3671 Label* if_false = NULL;
3672 Label* fall_through = NULL;
3673 context()->PrepareTest(&materialize_true, &materialize_false,
3674 &if_true, &if_false, &fall_through);
3675
3676 { AccumulatorValueContext context(this);
3677 VisitForTypeofValue(sub_expr);
3678 }
3679 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3680
3681 Factory* factory = isolate()->factory();
3682 if (String::Equals(check, factory->number_string())) {
3683 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
3684 __ JumpIfSmi(x0, if_true);
3685 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
3686 __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
3687 Split(eq, if_true, if_false, fall_through);
3688 } else if (String::Equals(check, factory->string_string())) {
3689 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
3690 __ JumpIfSmi(x0, if_false);
3691 __ CompareObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE);
3692 Split(lt, if_true, if_false, fall_through);
3693 } else if (String::Equals(check, factory->symbol_string())) {
3694 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
3695 __ JumpIfSmi(x0, if_false);
3696 __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
3697 Split(eq, if_true, if_false, fall_through);
3698 } else if (String::Equals(check, factory->boolean_string())) {
3699 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
3700 __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
3701 __ CompareRoot(x0, Heap::kFalseValueRootIndex);
3702 Split(eq, if_true, if_false, fall_through);
3703 } else if (String::Equals(check, factory->undefined_string())) {
3704 ASM_LOCATION(
3705 "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
Ben Murdoch097c5b22016-05-18 11:27:45 +01003706 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_false);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003707 __ JumpIfSmi(x0, if_false);
3708 // Check for undetectable objects => true.
3709 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
3710 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
3711 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
3712 fall_through);
3713 } else if (String::Equals(check, factory->function_string())) {
3714 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
3715 __ JumpIfSmi(x0, if_false);
3716 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
3717 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
3718 __ And(x1, x1, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
3719 __ CompareAndSplit(x1, Operand(1 << Map::kIsCallable), eq, if_true,
3720 if_false, fall_through);
3721 } else if (String::Equals(check, factory->object_string())) {
3722 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
3723 __ JumpIfSmi(x0, if_false);
3724 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
3725 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3726 __ JumpIfObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE, if_false, lt);
3727 // Check for callable or undetectable objects => false.
3728 __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
3729 __ TestAndSplit(x10, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable),
3730 if_true, if_false, fall_through);
3731// clang-format off
3732#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3733 } else if (String::Equals(check, factory->type##_string())) { \
3734 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof " \
3735 #type "_string"); \
3736 __ JumpIfSmi(x0, if_true); \
3737 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset)); \
3738 __ CompareRoot(x0, Heap::k##Type##MapRootIndex); \
3739 Split(eq, if_true, if_false, fall_through);
3740 SIMD128_TYPES(SIMD128_TYPE)
3741#undef SIMD128_TYPE
3742 // clang-format on
3743 } else {
3744 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
3745 if (if_false != fall_through) __ B(if_false);
3746 }
3747 context()->Plug(if_true, if_false);
3748}
3749
3750
3751void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3752 Comment cmnt(masm_, "[ CompareOperation");
3753 SetExpressionPosition(expr);
3754
3755 // Try to generate an optimized comparison with a literal value.
3756 // TODO(jbramley): This only checks common values like NaN or undefined.
3757 // Should it also handle ARM64 immediate operands?
3758 if (TryLiteralCompare(expr)) {
3759 return;
3760 }
3761
3762 // Assign labels according to context()->PrepareTest.
3763 Label materialize_true;
3764 Label materialize_false;
3765 Label* if_true = NULL;
3766 Label* if_false = NULL;
3767 Label* fall_through = NULL;
3768 context()->PrepareTest(&materialize_true, &materialize_false,
3769 &if_true, &if_false, &fall_through);
3770
3771 Token::Value op = expr->op();
3772 VisitForStackValue(expr->left());
3773 switch (op) {
3774 case Token::IN:
3775 VisitForStackValue(expr->right());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003776 CallRuntimeWithOperands(Runtime::kHasProperty);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003777 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3778 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
3779 Split(eq, if_true, if_false, fall_through);
3780 break;
3781
3782 case Token::INSTANCEOF: {
3783 VisitForAccumulatorValue(expr->right());
Ben Murdoch097c5b22016-05-18 11:27:45 +01003784 PopOperand(x1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003785 InstanceOfStub stub(isolate());
3786 __ CallStub(&stub);
3787 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3788 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
3789 Split(eq, if_true, if_false, fall_through);
3790 break;
3791 }
3792
3793 default: {
3794 VisitForAccumulatorValue(expr->right());
3795 Condition cond = CompareIC::ComputeCondition(op);
3796
3797 // Pop the stack value.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003798 PopOperand(x1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003799
3800 JumpPatchSite patch_site(masm_);
3801 if (ShouldInlineSmiCase(op)) {
3802 Label slow_case;
3803 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
3804 __ Cmp(x1, x0);
3805 Split(cond, if_true, if_false, NULL);
3806 __ Bind(&slow_case);
3807 }
3808
Ben Murdoch097c5b22016-05-18 11:27:45 +01003809 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003810 CallIC(ic, expr->CompareOperationFeedbackId());
3811 patch_site.EmitPatchInfo();
3812 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3813 __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
3814 }
3815 }
3816
3817 // Convert the result of the comparison into one expected for this
3818 // expression's context.
3819 context()->Plug(if_true, if_false);
3820}
3821
3822
3823void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
3824 Expression* sub_expr,
3825 NilValue nil) {
3826 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
3827 Label materialize_true, materialize_false;
3828 Label* if_true = NULL;
3829 Label* if_false = NULL;
3830 Label* fall_through = NULL;
3831 context()->PrepareTest(&materialize_true, &materialize_false,
3832 &if_true, &if_false, &fall_through);
3833
3834 VisitForAccumulatorValue(sub_expr);
3835 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3836
3837 if (expr->op() == Token::EQ_STRICT) {
3838 Heap::RootListIndex nil_value = nil == kNullValue ?
3839 Heap::kNullValueRootIndex :
3840 Heap::kUndefinedValueRootIndex;
3841 __ CompareRoot(x0, nil_value);
3842 Split(eq, if_true, if_false, fall_through);
3843 } else {
3844 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
3845 CallIC(ic, expr->CompareOperationFeedbackId());
3846 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
3847 Split(eq, if_true, if_false, fall_through);
3848 }
3849
3850 context()->Plug(if_true, if_false);
3851}
3852
3853
3854void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
3855 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3856 context()->Plug(x0);
3857}
3858
3859
3860void FullCodeGenerator::VisitYield(Yield* expr) {
3861 Comment cmnt(masm_, "[ Yield");
3862 SetExpressionPosition(expr);
3863
3864 // Evaluate yielded value first; the initial iterator definition depends on
3865 // this. It stays on the stack while we update the iterator.
3866 VisitForStackValue(expr->expression());
3867
3868 // TODO(jbramley): Tidy this up once the merge is done, using named registers
3869 // and suchlike. The implementation changes a little by bleeding_edge so I
3870 // don't want to spend too much time on it now.
3871
3872 switch (expr->yield_kind()) {
3873 case Yield::kSuspend:
3874 // Pop value from top-of-stack slot; box result into result register.
3875 EmitCreateIteratorResult(false);
3876 __ Push(result_register());
3877 // Fall through.
3878 case Yield::kInitial: {
3879 Label suspend, continuation, post_runtime, resume;
3880
3881 __ B(&suspend);
3882 // TODO(jbramley): This label is bound here because the following code
3883 // looks at its pos(). Is it possible to do something more efficient here,
3884 // perhaps using Adr?
3885 __ Bind(&continuation);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003886 // When we arrive here, the stack top is the resume mode and
3887 // result_register() holds the input value (the argument given to the
3888 // respective resume operation).
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003889 __ RecordGeneratorContinuation();
Ben Murdoch097c5b22016-05-18 11:27:45 +01003890 __ Pop(x1);
3891 __ Cmp(x1, Smi::FromInt(JSGeneratorObject::RETURN));
3892 __ B(ne, &resume);
3893 __ Push(result_register());
3894 EmitCreateIteratorResult(true);
3895 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003896
3897 __ Bind(&suspend);
3898 VisitForAccumulatorValue(expr->generator_object());
3899 DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
3900 __ Mov(x1, Smi::FromInt(continuation.pos()));
3901 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
3902 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
3903 __ Mov(x1, cp);
3904 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
3905 kLRHasBeenSaved, kDontSaveFPRegs);
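      // Skip the runtime call when the expression stack holds only the
      // yielded value (the stack pointer is at the first expression slot),
      // since there are no other operands to save.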
3906 __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
3907 __ Cmp(__ StackPointer(), x1);
3908 __ B(eq, &post_runtime);
3909 __ Push(x0); // generator object
3910 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
3911 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3912 __ Bind(&post_runtime);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003913 PopOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003914 EmitReturnSequence();
3915
3916 __ Bind(&resume);
3917 context()->Plug(result_register());
3918 break;
3919 }
3920
3921 case Yield::kFinal: {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003922 // Pop value from top-of-stack slot, box result into result register.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003923 OperandStackDepthDecrement(1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003924 EmitCreateIteratorResult(true);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003925 EmitUnwindAndReturn();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003926 break;
3927 }
3928
Ben Murdoch097c5b22016-05-18 11:27:45 +01003929 case Yield::kDelegating:
3930 UNREACHABLE();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003931 }
3932}
3933
3934
3935void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
3936 Expression *value,
3937 JSGeneratorObject::ResumeMode resume_mode) {
3938 ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
3939 Register generator_object = x1;
3940 Register the_hole = x2;
3941 Register operand_stack_size = w3;
3942 Register function = x4;
3943
3944 // The value stays in x0, and is ultimately read by the resumed generator, as
3945 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
3946 // is read to throw the value when the resumed generator is already closed. x1
3947 // will hold the generator object until the activation has been resumed.
3948 VisitForStackValue(generator);
3949 VisitForAccumulatorValue(value);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003950 PopOperand(generator_object);
3951
3952 // Store input value into generator object.
3953 __ Str(result_register(),
3954 FieldMemOperand(x1, JSGeneratorObject::kInputOffset));
3955 __ Mov(x2, result_register());
3956 __ RecordWriteField(x1, JSGeneratorObject::kInputOffset, x2, x3,
3957 kLRHasBeenSaved, kDontSaveFPRegs);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003958
3959 // Load suspended function and context.
3960 __ Ldr(cp, FieldMemOperand(generator_object,
3961 JSGeneratorObject::kContextOffset));
3962 __ Ldr(function, FieldMemOperand(generator_object,
3963 JSGeneratorObject::kFunctionOffset));
3964
3965 // Load receiver and store as the first argument.
3966 __ Ldr(x10, FieldMemOperand(generator_object,
3967 JSGeneratorObject::kReceiverOffset));
3968 __ Push(x10);
3969
3970 // Push holes for the rest of the arguments to the generator function.
3971 __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
3972
3973 // The number of arguments is stored as an int32_t, and -1 is a marker
3974 // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign
3975 // extension to correctly handle it. However, in this case, we operate on
3976 // 32-bit W registers, so extension isn't required.
3977 __ Ldr(w10, FieldMemOperand(x10,
3978 SharedFunctionInfo::kFormalParameterCountOffset));
3979 __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
3980 __ PushMultipleTimes(the_hole, w10);
3981
3982 // Enter a new JavaScript frame, and initialize its slots as they were when
3983 // the generator was suspended.
3984 Label resume_frame, done;
3985 __ Bl(&resume_frame);
3986 __ B(&done);
3987
3988 __ Bind(&resume_frame);
3989 __ Push(lr, // Return address.
3990 fp, // Caller's frame pointer.
3991 cp, // Callee's context.
3992 function); // Callee's JS Function.
3993 __ Add(fp, __ StackPointer(), kPointerSize * 2);
3994
3995 // Load and untag the operand stack size.
3996 __ Ldr(x10, FieldMemOperand(generator_object,
3997 JSGeneratorObject::kOperandStackOffset));
3998 __ Ldr(operand_stack_size,
3999 UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
4000
4001 // If we are sending a value and there is no operand stack, we can jump back
4002 // in directly.
4003 if (resume_mode == JSGeneratorObject::NEXT) {
4004 Label slow_resume;
4005 __ Cbnz(operand_stack_size, &slow_resume);
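    // Compute the resume address: the function's code entry plus the
    // untagged continuation offset recorded at suspension.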
4006 __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
4007 __ Ldrsw(x11,
4008 UntagSmiFieldMemOperand(generator_object,
4009 JSGeneratorObject::kContinuationOffset));
4010 __ Add(x10, x10, x11);
4011 __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
4012 __ Str(x12, FieldMemOperand(generator_object,
4013 JSGeneratorObject::kContinuationOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004014 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004015 __ Br(x10);
4016
4017 __ Bind(&slow_resume);
4018 }
4019
4020 // Otherwise, we push holes for the operand stack and call the runtime to fix
4021 // up the stack and the handlers.
4022 __ PushMultipleTimes(the_hole, operand_stack_size);
4023
4024 __ Mov(x10, Smi::FromInt(resume_mode));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004025 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004026 __ Push(generator_object, result_register(), x10);
4027 __ CallRuntime(Runtime::kResumeJSGeneratorObject);
4028 // Not reached: the runtime call returns elsewhere.
4029 __ Unreachable();
4030
4031 __ Bind(&done);
4032 context()->Plug(result_register());
4033}
4034
Ben Murdoch097c5b22016-05-18 11:27:45 +01004035void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
4036 OperandStackDepthIncrement(2);
4037 __ Push(reg1, reg2);
4038}
4039
4040void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
4041 Register reg3) {
4042 OperandStackDepthIncrement(3);
4043 __ Push(reg1, reg2, reg3);
4044}
4045
4046void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
4047 OperandStackDepthDecrement(2);
4048 __ Pop(reg1, reg2);
4049}
4050
4051void FullCodeGenerator::EmitOperandStackDepthCheck() {
4052 if (FLAG_debug_code) {
4053 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
4054 operand_stack_depth_ * kPointerSize;
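    // Check that the distance between fp and jssp matches the fixed frame
    // size plus the statically tracked operand stack depth.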
4055 __ Sub(x0, fp, jssp);
4056 __ Cmp(x0, Operand(expected_diff));
4057 __ Assert(eq, kUnexpectedStackDepth);
4058 }
4059}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004060
4061void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
4062 Label allocate, done_allocate;
4063
4064 // Allocate and populate an object with this form: { value: VAL, done: DONE }
4065
4066 Register result = x0;
4067 __ Allocate(JSIteratorResult::kSize, result, x10, x11, &allocate, TAG_OBJECT);
4068 __ B(&done_allocate);
4069
4070 __ Bind(&allocate);
4071 __ Push(Smi::FromInt(JSIteratorResult::kSize));
4072 __ CallRuntime(Runtime::kAllocateInNewSpace);
4073
4074 __ Bind(&done_allocate);
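  // Initialize the object in place: the map, then (properties, elements) as
  // one store pair of empty fixed arrays, then (value, done) as another pair,
  // relying on the field layout checked by the STATIC_ASSERTs below.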
4075 Register map_reg = x1;
4076 Register result_value = x2;
4077 Register boolean_done = x3;
4078 Register empty_fixed_array = x4;
4079 Register untagged_result = x5;
4080 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, map_reg);
4081 __ Pop(result_value);
4082 __ LoadRoot(boolean_done,
4083 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
4084 __ LoadRoot(empty_fixed_array, Heap::kEmptyFixedArrayRootIndex);
4085 STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
4086 JSObject::kElementsOffset);
4087 STATIC_ASSERT(JSIteratorResult::kValueOffset + kPointerSize ==
4088 JSIteratorResult::kDoneOffset);
4089 __ ObjectUntag(untagged_result, result);
4090 __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
4091 __ Stp(empty_fixed_array, empty_fixed_array,
4092 MemOperand(untagged_result, JSObject::kPropertiesOffset));
4093 __ Stp(result_value, boolean_done,
4094 MemOperand(untagged_result, JSIteratorResult::kValueOffset));
4095 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
4096}
4097
4098
4099// TODO(all): I don't like this method.
4100// It seems to me that in too many places x0 is used in place of this.
4101// Also, this function is not suitable for all places where x0 should be
4102// abstracted (e.g. when used as an argument). But some places assume that the
4103// first argument register is x0, and use this function instead.
4104// Considering that most of the register allocation is hard-coded in the
4105// FullCodeGen, that it is unlikely we will need to change it extensively, and
4106// that abstracting the allocation through functions would not yield any
4107// performance benefit, I think the existence of this function is debatable.
4108Register FullCodeGenerator::result_register() {
4109 return x0;
4110}
4111
4112
4113Register FullCodeGenerator::context_register() {
4114 return cp;
4115}
4116
4117
4118void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4119 DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
4120 __ Str(value, MemOperand(fp, frame_offset));
4121}
4122
4123
4124void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4125 __ Ldr(dst, ContextMemOperand(cp, context_index));
4126}
4127
4128
4129void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4130 Scope* closure_scope = scope()->ClosureScope();
4131 if (closure_scope->is_script_scope() ||
4132 closure_scope->is_module_scope()) {
4133 // Contexts nested in the native context have a canonical empty function
4134 // as their closure, not the anonymous closure containing the global
4135 // code.
4136 DCHECK(kSmiTag == 0);
4137 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, x10);
4138 } else if (closure_scope->is_eval_scope()) {
4139 // Contexts created by a call to eval have the same closure as the
4140 // context calling eval, not the anonymous closure containing the eval
4141 // code. Fetch it from the context.
4142 __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
4143 } else {
4144 DCHECK(closure_scope->is_function_scope());
4145 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4146 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01004147 PushOperand(x10);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004148}
4149
4150
4151void FullCodeGenerator::EnterFinallyBlock() {
4152 ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
4153 DCHECK(!result_register().is(x10));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004154 // Store pending message while executing finally block.
4155 ExternalReference pending_message_obj =
4156 ExternalReference::address_of_pending_message_obj(isolate());
4157 __ Mov(x10, pending_message_obj);
4158 __ Ldr(x10, MemOperand(x10));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004159 PushOperand(x10);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004160
4161 ClearPendingMessage();
4162}
4163
4164
4165void FullCodeGenerator::ExitFinallyBlock() {
4166 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
4167 DCHECK(!result_register().is(x10));
4168
4169 // Restore pending message from stack.
Ben Murdoch097c5b22016-05-18 11:27:45 +01004170 PopOperand(x10);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004171 ExternalReference pending_message_obj =
4172 ExternalReference::address_of_pending_message_obj(isolate());
4173 __ Mov(x13, pending_message_obj);
4174 __ Str(x10, MemOperand(x13));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004175}
4176
4177
4178void FullCodeGenerator::ClearPendingMessage() {
4179 DCHECK(!result_register().is(x10));
4180 ExternalReference pending_message_obj =
4181 ExternalReference::address_of_pending_message_obj(isolate());
4182 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
4183 __ Mov(x13, pending_message_obj);
4184 __ Str(x10, MemOperand(x13));
4185}
4186
4187
4188void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) {
4189 DCHECK(!slot.IsInvalid());
4190 __ Mov(VectorStoreICTrampolineDescriptor::SlotRegister(), SmiFromSlot(slot));
4191}
4192
Ben Murdoch097c5b22016-05-18 11:27:45 +01004193void FullCodeGenerator::DeferredCommands::EmitCommands() {
4194 __ Pop(result_register(), x1); // Restore the accumulator and get the token.
4195 for (DeferredCommand cmd : commands_) {
4196 Label skip;
4197 __ Cmp(x1, Operand(Smi::FromInt(cmd.token)));
4198 __ B(ne, &skip);
4199 switch (cmd.command) {
4200 case kReturn:
4201 codegen_->EmitUnwindAndReturn();
4202 break;
4203 case kThrow:
4204 __ Push(result_register());
4205 __ CallRuntime(Runtime::kReThrow);
4206 break;
4207 case kContinue:
4208 codegen_->EmitContinue(cmd.target);
4209 break;
4210 case kBreak:
4211 codegen_->EmitBreak(cmd.target);
4212 break;
4213 }
4214 __ bind(&skip);
4215 }
4216}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004217
4218#undef __
4219
4220
4221void BackEdgeTable::PatchAt(Code* unoptimized_code,
4222 Address pc,
4223 BackEdgeState target_state,
4224 Code* replacement_code) {
4225 // Turn the jump into a nop.
4226 Address branch_address = pc - 3 * kInstructionSize;
4227 Isolate* isolate = unoptimized_code->GetIsolate();
4228 PatchingAssembler patcher(isolate, branch_address, 1);
4229
4230 DCHECK(Instruction::Cast(branch_address)
4231 ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
4232 (Instruction::Cast(branch_address)->IsCondBranchImm() &&
4233 Instruction::Cast(branch_address)->ImmPCOffset() ==
4234 6 * kInstructionSize));
4235
4236 switch (target_state) {
4237 case INTERRUPT:
4238 // <decrement profiling counter>
4239 // .. .. .. .. b.pl ok
4240 // .. .. .. .. ldr x16, pc+<interrupt stub address>
4241 // .. .. .. .. blr x16
4242 // ... more instructions.
4243 // ok-label
4244 // Jump offset is 6 instructions.
4245 patcher.b(6, pl);
4246 break;
4247 case ON_STACK_REPLACEMENT:
4248 case OSR_AFTER_STACK_CHECK:
4249 // <decrement profiling counter>
4250 // .. .. .. .. mov x0, x0 (NOP)
4251 // .. .. .. .. ldr x16, pc+<on-stack replacement address>
4252 // .. .. .. .. blr x16
4253 patcher.nop(Assembler::INTERRUPT_CODE_NOP);
4254 break;
4255 }
4256
4257 // Replace the call address.
4258 Instruction* load = Instruction::Cast(pc)->preceding(2);
4259 Address interrupt_address_pointer =
4260 reinterpret_cast<Address>(load) + load->ImmPCOffset();
4261 DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
4262 reinterpret_cast<uint64_t>(
4263 isolate->builtins()->OnStackReplacement()->entry())) ||
4264 (Memory::uint64_at(interrupt_address_pointer) ==
4265 reinterpret_cast<uint64_t>(
4266 isolate->builtins()->InterruptCheck()->entry())) ||
4267 (Memory::uint64_at(interrupt_address_pointer) ==
4268 reinterpret_cast<uint64_t>(
4269 isolate->builtins()->OsrAfterStackCheck()->entry())) ||
4270 (Memory::uint64_at(interrupt_address_pointer) ==
4271 reinterpret_cast<uint64_t>(
4272 isolate->builtins()->OnStackReplacement()->entry())));
4273 Memory::uint64_at(interrupt_address_pointer) =
4274 reinterpret_cast<uint64_t>(replacement_code->entry());
4275
4276 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4277 unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
4278}
4279
4280
4281BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4282 Isolate* isolate,
4283 Code* unoptimized_code,
4284 Address pc) {
4285 // TODO(jbramley): There should be some extra assertions here (as in the ARM
4286 // back-end), but this function is gone in bleeding_edge so it might not
4287 // matter anyway.
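  // The instruction three slots before the return address is either the
  // interrupt check's conditional branch or the NOP it was patched to; the
  // literal load two slots before supplies the target builtin's entry address.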
4288 Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);
4289
4290 if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
4291 Instruction* load = Instruction::Cast(pc)->preceding(2);
4292 uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
4293 load->ImmPCOffset());
4294 if (entry == reinterpret_cast<uint64_t>(
4295 isolate->builtins()->OnStackReplacement()->entry())) {
4296 return ON_STACK_REPLACEMENT;
4297 } else if (entry == reinterpret_cast<uint64_t>(
4298 isolate->builtins()->OsrAfterStackCheck()->entry())) {
4299 return OSR_AFTER_STACK_CHECK;
4300 } else {
4301 UNREACHABLE();
4302 }
4303 }
4304
4305 return INTERRUPT;
4306}
4307
4308
4309} // namespace internal
4310} // namespace v8
4311
4312#endif // V8_TARGET_ARCH_ARM64