1// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_ARM64
6
7#include "src/ast/scopes.h"
8#include "src/code-factory.h"
9#include "src/code-stubs.h"
10#include "src/codegen.h"
11#include "src/debug/debug.h"
12#include "src/full-codegen/full-codegen.h"
13#include "src/ic/ic.h"
14#include "src/parsing/parser.h"
15
16#include "src/arm64/code-stubs-arm64.h"
17#include "src/arm64/frames-arm64.h"
18#include "src/arm64/macro-assembler-arm64.h"
19
20namespace v8 {
21namespace internal {
22
23#define __ ACCESS_MASM(masm())
24
25class JumpPatchSite BASE_EMBEDDED {
26 public:
27 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
28#ifdef DEBUG
29 info_emitted_ = false;
30#endif
31 }
32
33 ~JumpPatchSite() {
34 if (patch_site_.is_bound()) {
35 DCHECK(info_emitted_);
36 } else {
37 DCHECK(reg_.IsNone());
38 }
39 }
40
41 void EmitJumpIfNotSmi(Register reg, Label* target) {
42 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
43 InstructionAccurateScope scope(masm_, 1);
44 DCHECK(!info_emitted_);
45 DCHECK(reg.Is64Bits());
46 DCHECK(!reg.Is(csp));
47 reg_ = reg;
48 __ bind(&patch_site_);
49 __ tbz(xzr, 0, target); // Always taken before patched.
50 }
51
52 void EmitJumpIfSmi(Register reg, Label* target) {
53 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
54 InstructionAccurateScope scope(masm_, 1);
55 DCHECK(!info_emitted_);
56 DCHECK(reg.Is64Bits());
57 DCHECK(!reg.Is(csp));
58 reg_ = reg;
59 __ bind(&patch_site_);
60 __ tbnz(xzr, 0, target); // Never taken before patched.
61 }
62
63 void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
64 UseScratchRegisterScope temps(masm_);
65 Register temp = temps.AcquireX();
66 __ Orr(temp, reg1, reg2);
67 EmitJumpIfNotSmi(temp, target);
68 }
69
70 void EmitPatchInfo() {
71 Assembler::BlockPoolsScope scope(masm_);
72 InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
73#ifdef DEBUG
74 info_emitted_ = true;
75#endif
76 }
77
78 private:
79  MacroAssembler* masm() { return masm_; }
80  MacroAssembler* masm_;
81 Label patch_site_;
82 Register reg_;
83#ifdef DEBUG
84 bool info_emitted_;
85#endif
86};
87
88
89// Generate code for a JS function. On entry to the function the receiver
90// and arguments have been pushed on the stack left to right. The actual
91// argument count matches the formal parameter count expected by the
92// function.
93//
94// The live registers are:
95// - x1: the JS function object being called (i.e. ourselves).
96// - x3: the new target value
97// - cp: our context.
98// - fp: our caller's frame pointer.
99// - jssp: stack pointer.
100// - lr: return address.
101//
102// The function builds a JS frame. See JavaScriptFrameConstants in
103// frames-arm64.h for its layout.
104void FullCodeGenerator::Generate() {
105 CompilationInfo* info = info_;
106 profiling_counter_ = isolate()->factory()->NewCell(
107 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
108 SetFunctionPosition(literal());
109 Comment cmnt(masm_, "[ Function compiled by full code generator");
110
111 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
112
113  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
114 int receiver_offset = info->scope()->num_parameters() * kXRegSize;
115 __ Peek(x10, receiver_offset);
116 __ AssertNotSmi(x10);
117 __ CompareObjectType(x10, x10, x11, FIRST_JS_RECEIVER_TYPE);
118 __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
119 }
120
121 // Open a frame scope to indicate that there is a frame on the stack.
122 // The MANUAL indicates that the scope shouldn't actually generate code
123 // to set up the frame because we do it manually below.
124 FrameScope frame_scope(masm_, StackFrame::MANUAL);
125
126 // This call emits the following sequence in a way that can be patched for
127 // code ageing support:
128 // Push(lr, fp, cp, x1);
129 // Add(fp, jssp, 2 * kPointerSize);
130 info->set_prologue_offset(masm_->pc_offset());
131 __ Prologue(info->GeneratePreagedPrologue());
132
133 // Reserve space on the stack for locals.
134 { Comment cmnt(masm_, "[ Allocate locals");
135 int locals_count = info->scope()->num_stack_slots();
136 // Generators allocate locals, if any, in context slots.
137 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
138    OperandStackDepthIncrement(locals_count);
139    if (locals_count > 0) {
140 if (locals_count >= 128) {
141 Label ok;
142 DCHECK(jssp.Is(__ StackPointer()));
143 __ Sub(x10, jssp, locals_count * kPointerSize);
144 __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
145 __ B(hs, &ok);
146 __ CallRuntime(Runtime::kThrowStackOverflow);
147 __ Bind(&ok);
148 }
149 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
150 if (FLAG_optimize_for_size) {
151        __ PushMultipleTimes(x10, locals_count);
152 } else {
153 const int kMaxPushes = 32;
154 if (locals_count >= kMaxPushes) {
155 int loop_iterations = locals_count / kMaxPushes;
156 __ Mov(x2, loop_iterations);
157 Label loop_header;
158 __ Bind(&loop_header);
159 // Do pushes.
160          __ PushMultipleTimes(x10, kMaxPushes);
161 __ Subs(x2, x2, 1);
162 __ B(ne, &loop_header);
163 }
164 int remaining = locals_count % kMaxPushes;
165 // Emit the remaining pushes.
166        __ PushMultipleTimes(x10, remaining);
167 }
168 }
169 }
170
171 bool function_in_register_x1 = true;
172
173 if (info->scope()->num_heap_slots() > 0) {
174 // Argument to NewContext is the function, which is still in x1.
175 Comment cmnt(masm_, "[ Allocate context");
176 bool need_write_barrier = true;
177 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
178 if (info->scope()->is_script_scope()) {
179 __ Mov(x10, Operand(info->scope()->GetScopeInfo(info->isolate())));
180 __ Push(x1, x10);
181 __ CallRuntime(Runtime::kNewScriptContext);
182 PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
183 // The new target value is not used, clobbering is safe.
184 DCHECK_NULL(info->scope()->new_target_var());
185 } else {
186 if (info->scope()->new_target_var() != nullptr) {
187 __ Push(x3); // Preserve new target.
188 }
189 if (slots <= FastNewContextStub::kMaximumSlots) {
190 FastNewContextStub stub(isolate(), slots);
191 __ CallStub(&stub);
192 // Result of FastNewContextStub is always in new space.
193 need_write_barrier = false;
194 } else {
195 __ Push(x1);
196 __ CallRuntime(Runtime::kNewFunctionContext);
197 }
198 if (info->scope()->new_target_var() != nullptr) {
199 __ Pop(x3); // Restore new target.
200 }
201 }
202 function_in_register_x1 = false;
203 // Context is returned in x0. It replaces the context passed to us.
204 // It's saved in the stack and kept live in cp.
205 __ Mov(cp, x0);
206 __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
207 // Copy any necessary parameters into the context.
208 int num_parameters = info->scope()->num_parameters();
209 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
210 for (int i = first_parameter; i < num_parameters; i++) {
211 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
212 if (var->IsContextSlot()) {
213 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
214 (num_parameters - 1 - i) * kPointerSize;
215 // Load parameter from stack.
216 __ Ldr(x10, MemOperand(fp, parameter_offset));
217 // Store it in the context.
218 MemOperand target = ContextMemOperand(cp, var->index());
219 __ Str(x10, target);
220
221 // Update the write barrier.
222 if (need_write_barrier) {
223 __ RecordWriteContextSlot(cp, static_cast<int>(target.offset()), x10,
224 x11, kLRHasBeenSaved, kDontSaveFPRegs);
225 } else if (FLAG_debug_code) {
226 Label done;
227 __ JumpIfInNewSpace(cp, &done);
228 __ Abort(kExpectedNewSpaceObject);
229 __ bind(&done);
230 }
231 }
232 }
233 }
234
235  // Registers holding this function and new target are both trashed in case we
236 // bailout here. But since that can happen only when new target is not used
237 // and we allocate a context, the value of |function_in_register| is correct.
238 PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);
239
240 // Possibly set up a local binding to the this function which is used in
241 // derived constructors with super calls.
242 Variable* this_function_var = scope()->this_function_var();
243 if (this_function_var != nullptr) {
244 Comment cmnt(masm_, "[ This function");
245 if (!function_in_register_x1) {
246 __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
247      // The write barrier clobbers the register again, keep it marked as such.
248 }
249 SetVar(this_function_var, x1, x0, x2);
250 }
251
252 // Possibly set up a local binding to the new target value.
253 Variable* new_target_var = scope()->new_target_var();
254 if (new_target_var != nullptr) {
255 Comment cmnt(masm_, "[ new.target");
256 SetVar(new_target_var, x3, x0, x2);
257 }
258
259 // Possibly allocate RestParameters
260 int rest_index;
261 Variable* rest_param = scope()->rest_parameter(&rest_index);
262 if (rest_param) {
263 Comment cmnt(masm_, "[ Allocate rest parameter array");
264    if (!function_in_register_x1) {
265 __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
266 }
267 FastNewRestParameterStub stub(isolate());
268    __ CallStub(&stub);
269    function_in_register_x1 = false;
270    SetVar(rest_param, x0, x1, x2);
271 }
272
273 Variable* arguments = scope()->arguments();
274 if (arguments != NULL) {
275 // Function uses arguments object.
276 Comment cmnt(masm_, "[ Allocate arguments object");
277    if (!function_in_register_x1) {
278 // Load this again, if it's used by the local context below.
279 __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
280 }
281    if (is_strict(language_mode()) || !has_simple_parameters()) {
282 FastNewStrictArgumentsStub stub(isolate());
283 __ CallStub(&stub);
284 } else if (literal()->has_duplicate_parameters()) {
285 __ Push(x1);
286 __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
287 } else {
288 FastNewSloppyArgumentsStub stub(isolate());
289 __ CallStub(&stub);
290 }
291
292 SetVar(arguments, x0, x1, x2);
293 }
294
295 if (FLAG_trace) {
296 __ CallRuntime(Runtime::kTraceEnter);
297 }
298
299  // Visit the declarations and body.
300 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
301 {
302    Comment cmnt(masm_, "[ Declarations");
303    VisitDeclarations(scope()->declarations());
304 }
305
306  // Assert that the declarations do not use ICs. Otherwise the debugger
307 // won't be able to redirect a PC at an IC to the correct IC in newly
308 // recompiled code.
309 DCHECK_EQ(0, ic_total_count_);
310
311  {
312 Comment cmnt(masm_, "[ Stack check");
313 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
314 Label ok;
315 DCHECK(jssp.Is(__ StackPointer()));
316 __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
317 __ B(hs, &ok);
318 PredictableCodeSizeScope predictable(masm_,
319 Assembler::kCallSizeWithRelocation);
320 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
321 __ Bind(&ok);
322 }
323
324  {
325 Comment cmnt(masm_, "[ Body");
326 DCHECK(loop_depth() == 0);
327 VisitStatements(literal()->body());
328 DCHECK(loop_depth() == 0);
329  }
330
331 // Always emit a 'return undefined' in case control fell off the end of
332 // the body.
333 { Comment cmnt(masm_, "[ return <undefined>;");
334 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
335 }
336 EmitReturnSequence();
337
338 // Force emission of the pools, so they don't get emitted in the middle
339 // of the back edge table.
340 masm()->CheckVeneerPool(true, false);
341 masm()->CheckConstPool(true, false);
342}
343
344
345void FullCodeGenerator::ClearAccumulator() {
346 __ Mov(x0, Smi::FromInt(0));
347}
348
349
350void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
351 __ Mov(x2, Operand(profiling_counter_));
352 __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
353 __ Subs(x3, x3, Smi::FromInt(delta));
354 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
355}
356
357
358void FullCodeGenerator::EmitProfilingCounterReset() {
359 int reset_value = FLAG_interrupt_budget;
360 __ Mov(x2, Operand(profiling_counter_));
361 __ Mov(x3, Smi::FromInt(reset_value));
362 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
363}
364
365
366void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
367 Label* back_edge_target) {
368 DCHECK(jssp.Is(__ StackPointer()));
369 Comment cmnt(masm_, "[ Back edge bookkeeping");
370 // Block literal pools whilst emitting back edge code.
371 Assembler::BlockPoolsScope block_const_pool(masm_);
372 Label ok;
373
374 DCHECK(back_edge_target->is_bound());
375 // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
376 // to reduce the absolute error due to the integer division. To do that,
377 // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
378 // the result).
379 int distance =
380 static_cast<int>(masm_->SizeOfCodeGeneratedSince(back_edge_target) +
381 kCodeSizeMultiplier / 2);
382 int weight = Min(kMaxBackEdgeWeight,
383 Max(1, distance / kCodeSizeMultiplier));
384 EmitProfilingCounterDecrement(weight);
385 __ B(pl, &ok);
386 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
387
388 // Record a mapping of this PC offset to the OSR id. This is used to find
389 // the AST id from the unoptimized code in order to use it as a key into
390 // the deoptimization input data found in the optimized code.
391 RecordBackEdge(stmt->OsrEntryId());
392
393 EmitProfilingCounterReset();
394
395 __ Bind(&ok);
396 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
397 // Record a mapping of the OSR id to this PC. This is used if the OSR
398 // entry becomes the target of a bailout. We don't expect it to be, but
399 // we want it to work if it is.
400 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
401}
402
403void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
404 bool is_tail_call) {
405 // Pretend that the exit is a backwards jump to the entry.
406 int weight = 1;
407 if (info_->ShouldSelfOptimize()) {
408 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
409 } else {
410 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
411 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
412 }
413 EmitProfilingCounterDecrement(weight);
414 Label ok;
415 __ B(pl, &ok);
416 // Don't need to save result register if we are going to do a tail call.
417 if (!is_tail_call) {
418 __ Push(x0);
419 }
420 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
421 if (!is_tail_call) {
422 __ Pop(x0);
423 }
424 EmitProfilingCounterReset();
425 __ Bind(&ok);
426}
427
428void FullCodeGenerator::EmitReturnSequence() {
429 Comment cmnt(masm_, "[ Return sequence");
430
431 if (return_label_.is_bound()) {
432 __ B(&return_label_);
433
434 } else {
435 __ Bind(&return_label_);
436 if (FLAG_trace) {
437 // Push the return value on the stack as the parameter.
438 // Runtime::TraceExit returns its parameter in x0.
439 __ Push(result_register());
440 __ CallRuntime(Runtime::kTraceExit);
441 DCHECK(x0.Is(result_register()));
442 }
443    EmitProfilingCounterHandlingForReturnSequence(false);
444
445 SetReturnPosition(literal());
446 const Register& current_sp = __ StackPointer();
447    // Nothing ensures 16-byte alignment here.
448 DCHECK(!current_sp.Is(csp));
449 __ Mov(current_sp, fp);
450 __ Ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
451 // Drop the arguments and receiver and return.
452 // TODO(all): This implementation is overkill as it supports 2**31+1
453 // arguments, consider how to improve it without creating a security
454 // hole.
455 __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
456 __ Add(current_sp, current_sp, ip0);
457 __ Ret();
458 int32_t arg_count = info_->scope()->num_parameters() + 1;
459 __ dc64(kXRegSize * arg_count);
460 }
461}
462
463
464void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
465 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
466 codegen()->GetVar(result_register(), var);
467  codegen()->PushOperand(result_register());
468}
469
470
471void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
472 // Root values have no side effects.
473}
474
475
476void FullCodeGenerator::AccumulatorValueContext::Plug(
477 Heap::RootListIndex index) const {
478 __ LoadRoot(result_register(), index);
479}
480
481
482void FullCodeGenerator::StackValueContext::Plug(
483 Heap::RootListIndex index) const {
484 __ LoadRoot(result_register(), index);
485  codegen()->PushOperand(result_register());
486}
487
488
489void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
490 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
491 false_label_);
492 if (index == Heap::kUndefinedValueRootIndex ||
493 index == Heap::kNullValueRootIndex ||
494 index == Heap::kFalseValueRootIndex) {
495 if (false_label_ != fall_through_) __ B(false_label_);
496 } else if (index == Heap::kTrueValueRootIndex) {
497 if (true_label_ != fall_through_) __ B(true_label_);
498 } else {
499 __ LoadRoot(result_register(), index);
500 codegen()->DoTest(this);
501 }
502}
503
504
505void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
506}
507
508
509void FullCodeGenerator::AccumulatorValueContext::Plug(
510 Handle<Object> lit) const {
511 __ Mov(result_register(), Operand(lit));
512}
513
514
515void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
516 // Immediates cannot be pushed directly.
517 __ Mov(result_register(), Operand(lit));
518  codegen()->PushOperand(result_register());
519}
520
521
522void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
523 codegen()->PrepareForBailoutBeforeSplit(condition(),
524 true,
525 true_label_,
526 false_label_);
527  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable());
528  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
529 if (false_label_ != fall_through_) __ B(false_label_);
530 } else if (lit->IsTrue() || lit->IsJSObject()) {
531 if (true_label_ != fall_through_) __ B(true_label_);
532 } else if (lit->IsString()) {
533 if (String::cast(*lit)->length() == 0) {
534 if (false_label_ != fall_through_) __ B(false_label_);
535 } else {
536 if (true_label_ != fall_through_) __ B(true_label_);
537 }
538 } else if (lit->IsSmi()) {
539 if (Smi::cast(*lit)->value() == 0) {
540 if (false_label_ != fall_through_) __ B(false_label_);
541 } else {
542 if (true_label_ != fall_through_) __ B(true_label_);
543 }
544 } else {
545 // For simplicity we always test the accumulator register.
546 __ Mov(result_register(), Operand(lit));
547 codegen()->DoTest(this);
548 }
549}
550
551
552void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
553 Register reg) const {
554 DCHECK(count > 0);
555  if (count > 1) codegen()->DropOperands(count - 1);
556  __ Poke(reg, 0);
557}
558
559
560void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
561 Label* materialize_false) const {
562 DCHECK(materialize_true == materialize_false);
563 __ Bind(materialize_true);
564}
565
566
567void FullCodeGenerator::AccumulatorValueContext::Plug(
568 Label* materialize_true,
569 Label* materialize_false) const {
570 Label done;
571 __ Bind(materialize_true);
572 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
573 __ B(&done);
574 __ Bind(materialize_false);
575 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
576 __ Bind(&done);
577}
578
579
580void FullCodeGenerator::StackValueContext::Plug(
581 Label* materialize_true,
582 Label* materialize_false) const {
583 Label done;
584 __ Bind(materialize_true);
585 __ LoadRoot(x10, Heap::kTrueValueRootIndex);
586 __ B(&done);
587 __ Bind(materialize_false);
588 __ LoadRoot(x10, Heap::kFalseValueRootIndex);
589 __ Bind(&done);
590  codegen()->PushOperand(x10);
591}
592
593
594void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
595 Label* materialize_false) const {
596 DCHECK(materialize_true == true_label_);
597 DCHECK(materialize_false == false_label_);
598}
599
600
601void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
602 Heap::RootListIndex value_root_index =
603 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
604 __ LoadRoot(result_register(), value_root_index);
605}
606
607
608void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
609 Heap::RootListIndex value_root_index =
610 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
611 __ LoadRoot(x10, value_root_index);
612  codegen()->PushOperand(x10);
613}
614
615
616void FullCodeGenerator::TestContext::Plug(bool flag) const {
617 codegen()->PrepareForBailoutBeforeSplit(condition(),
618 true,
619 true_label_,
620 false_label_);
621 if (flag) {
622 if (true_label_ != fall_through_) {
623 __ B(true_label_);
624 }
625 } else {
626 if (false_label_ != fall_through_) {
627 __ B(false_label_);
628 }
629 }
630}
631
632
633void FullCodeGenerator::DoTest(Expression* condition,
634 Label* if_true,
635 Label* if_false,
636 Label* fall_through) {
637  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
638  CallIC(ic, condition->test_id());
639 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
640 Split(eq, if_true, if_false, fall_through);
641}
642
643
644// If (cond), branch to if_true.
645// If (!cond), branch to if_false.
646// fall_through is used as an optimization in cases where only one branch
647// instruction is necessary.
648void FullCodeGenerator::Split(Condition cond,
649 Label* if_true,
650 Label* if_false,
651 Label* fall_through) {
652 if (if_false == fall_through) {
653 __ B(cond, if_true);
654 } else if (if_true == fall_through) {
655 DCHECK(if_false != fall_through);
656 __ B(NegateCondition(cond), if_false);
657 } else {
658 __ B(cond, if_true);
659 __ B(if_false);
660 }
661}
662
663
664MemOperand FullCodeGenerator::StackOperand(Variable* var) {
665 // Offset is negative because higher indexes are at lower addresses.
666 int offset = -var->index() * kXRegSize;
667 // Adjust by a (parameter or local) base offset.
668 if (var->IsParameter()) {
669 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
670 } else {
671 offset += JavaScriptFrameConstants::kLocal0Offset;
672 }
673 return MemOperand(fp, offset);
674}
675
676
677MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
678 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
679 if (var->IsContextSlot()) {
680 int context_chain_length = scope()->ContextChainLength(var->scope());
681 __ LoadContext(scratch, context_chain_length);
682 return ContextMemOperand(scratch, var->index());
683 } else {
684 return StackOperand(var);
685 }
686}
687
688
689void FullCodeGenerator::GetVar(Register dest, Variable* var) {
690 // Use destination as scratch.
691 MemOperand location = VarOperand(var, dest);
692 __ Ldr(dest, location);
693}
694
695
696void FullCodeGenerator::SetVar(Variable* var,
697 Register src,
698 Register scratch0,
699 Register scratch1) {
700 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
701 DCHECK(!AreAliased(src, scratch0, scratch1));
702 MemOperand location = VarOperand(var, scratch0);
703 __ Str(src, location);
704
705 // Emit the write barrier code if the location is in the heap.
706 if (var->IsContextSlot()) {
707 // scratch0 contains the correct context.
708 __ RecordWriteContextSlot(scratch0, static_cast<int>(location.offset()),
709 src, scratch1, kLRHasBeenSaved, kDontSaveFPRegs);
710 }
711}
712
713
714void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
715 bool should_normalize,
716 Label* if_true,
717 Label* if_false) {
718 // Only prepare for bailouts before splits if we're in a test
719 // context. Otherwise, we let the Visit function deal with the
720 // preparation to avoid preparing with the same AST id twice.
721 if (!context()->IsTest()) return;
722
723 // TODO(all): Investigate to see if there is something to work on here.
724 Label skip;
725 if (should_normalize) {
726 __ B(&skip);
727 }
728 PrepareForBailout(expr, TOS_REG);
729 if (should_normalize) {
730 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
731 Split(eq, if_true, if_false, NULL);
732 __ Bind(&skip);
733 }
734}
735
736
737void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
738 // The variable in the declaration always resides in the current function
739 // context.
740 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
741  if (FLAG_debug_code) {
742    // Check that we're not inside a with or catch context.
743 __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
744 __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
745 __ Check(ne, kDeclarationInWithContext);
746 __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
747 __ Check(ne, kDeclarationInCatchContext);
748 }
749}
750
751
752void FullCodeGenerator::VisitVariableDeclaration(
753 VariableDeclaration* declaration) {
754 // If it was not possible to allocate the variable at compile time, we
755 // need to "declare" it at runtime to make sure it actually exists in the
756 // local context.
757 VariableProxy* proxy = declaration->proxy();
758 VariableMode mode = declaration->mode();
759 Variable* variable = proxy->var();
760 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
761
762 switch (variable->location()) {
763 case VariableLocation::GLOBAL:
764 case VariableLocation::UNALLOCATED:
765 globals_->Add(variable->name(), zone());
766 globals_->Add(variable->binding_needs_init()
767 ? isolate()->factory()->the_hole_value()
768 : isolate()->factory()->undefined_value(),
769 zone());
770 break;
771
772 case VariableLocation::PARAMETER:
773 case VariableLocation::LOCAL:
774 if (hole_init) {
775 Comment cmnt(masm_, "[ VariableDeclaration");
776 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
777 __ Str(x10, StackOperand(variable));
778 }
779 break;
780
781 case VariableLocation::CONTEXT:
782 if (hole_init) {
783 Comment cmnt(masm_, "[ VariableDeclaration");
784 EmitDebugCheckDeclarationContext(variable);
785 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
786 __ Str(x10, ContextMemOperand(cp, variable->index()));
787 // No write barrier since the_hole_value is in old space.
788 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
789 }
790 break;
791
792 case VariableLocation::LOOKUP: {
793 Comment cmnt(masm_, "[ VariableDeclaration");
794 __ Mov(x2, Operand(variable->name()));
795 // Declaration nodes are always introduced in one of four modes.
796 DCHECK(IsDeclaredVariableMode(mode));
797 // Push initial value, if any.
798 // Note: For variables we must not push an initial value (such as
799 // 'undefined') because we may have a (legal) redeclaration and we
800 // must not destroy the current value.
801 if (hole_init) {
802 __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
803 __ Push(x2, x0);
804 } else {
805 // Pushing 0 (xzr) indicates no initial value.
806 __ Push(x2, xzr);
807 }
808 __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
809 __ CallRuntime(Runtime::kDeclareLookupSlot);
810 break;
811 }
812 }
813}
814
815
816void FullCodeGenerator::VisitFunctionDeclaration(
817 FunctionDeclaration* declaration) {
818 VariableProxy* proxy = declaration->proxy();
819 Variable* variable = proxy->var();
820 switch (variable->location()) {
821 case VariableLocation::GLOBAL:
822 case VariableLocation::UNALLOCATED: {
823 globals_->Add(variable->name(), zone());
824 Handle<SharedFunctionInfo> function =
825 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
826 // Check for stack overflow exception.
827 if (function.is_null()) return SetStackOverflow();
828 globals_->Add(function, zone());
829 break;
830 }
831
832 case VariableLocation::PARAMETER:
833 case VariableLocation::LOCAL: {
834 Comment cmnt(masm_, "[ Function Declaration");
835 VisitForAccumulatorValue(declaration->fun());
836 __ Str(result_register(), StackOperand(variable));
837 break;
838 }
839
840 case VariableLocation::CONTEXT: {
841 Comment cmnt(masm_, "[ Function Declaration");
842 EmitDebugCheckDeclarationContext(variable);
843 VisitForAccumulatorValue(declaration->fun());
844 __ Str(result_register(), ContextMemOperand(cp, variable->index()));
845 int offset = Context::SlotOffset(variable->index());
846 // We know that we have written a function, which is not a smi.
847 __ RecordWriteContextSlot(cp,
848 offset,
849 result_register(),
850 x2,
851 kLRHasBeenSaved,
852 kDontSaveFPRegs,
853 EMIT_REMEMBERED_SET,
854 OMIT_SMI_CHECK);
855 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
856 break;
857 }
858
859 case VariableLocation::LOOKUP: {
860 Comment cmnt(masm_, "[ Function Declaration");
861 __ Mov(x2, Operand(variable->name()));
862      PushOperand(x2);
863      // Push initial value for function declaration.
864 VisitForStackValue(declaration->fun());
865      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
866 CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
867      break;
868 }
869 }
870}
871
872
873void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
874 // Call the runtime to declare the globals.
875 __ Mov(x11, Operand(pairs));
876 Register flags = xzr;
877 if (Smi::FromInt(DeclareGlobalsFlags())) {
878 flags = x10;
879 __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
880 }
881 __ Push(x11, flags);
882 __ CallRuntime(Runtime::kDeclareGlobals);
883 // Return value is ignored.
884}
885
886
887void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
888 // Call the runtime to declare the modules.
889 __ Push(descriptions);
890 __ CallRuntime(Runtime::kDeclareModules);
891 // Return value is ignored.
892}
893
894
895void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
896 ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
897 Comment cmnt(masm_, "[ SwitchStatement");
898 Breakable nested_statement(this, stmt);
899 SetStatementPosition(stmt);
900
901 // Keep the switch value on the stack until a case matches.
902 VisitForStackValue(stmt->tag());
903 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
904
905 ZoneList<CaseClause*>* clauses = stmt->cases();
906 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
907
908 Label next_test; // Recycled for each test.
909 // Compile all the tests with branches to their bodies.
910 for (int i = 0; i < clauses->length(); i++) {
911 CaseClause* clause = clauses->at(i);
912 clause->body_target()->Unuse();
913
914 // The default is not a test, but remember it as final fall through.
915 if (clause->is_default()) {
916 default_clause = clause;
917 continue;
918 }
919
920 Comment cmnt(masm_, "[ Case comparison");
921 __ Bind(&next_test);
922 next_test.Unuse();
923
924 // Compile the label expression.
925 VisitForAccumulatorValue(clause->label());
926
927 // Perform the comparison as if via '==='.
928 __ Peek(x1, 0); // Switch value.
929
930 JumpPatchSite patch_site(masm_);
931 if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
932 Label slow_case;
933 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
934 __ Cmp(x1, x0);
935 __ B(ne, &next_test);
936 __ Drop(1); // Switch value is no longer needed.
937 __ B(clause->body_target());
938 __ Bind(&slow_case);
939 }
940
941 // Record position before stub call for type feedback.
942 SetExpressionPosition(clause);
943    Handle<Code> ic =
944 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
945    CallIC(ic, clause->CompareId());
946 patch_site.EmitPatchInfo();
947
948 Label skip;
949 __ B(&skip);
950 PrepareForBailout(clause, TOS_REG);
951 __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
952 __ Drop(1);
953 __ B(clause->body_target());
954 __ Bind(&skip);
955
956 __ Cbnz(x0, &next_test);
957 __ Drop(1); // Switch value is no longer needed.
958 __ B(clause->body_target());
959 }
960
961 // Discard the test value and jump to the default if present, otherwise to
962 // the end of the statement.
963 __ Bind(&next_test);
964  DropOperands(1);  // Switch value is no longer needed.
965  if (default_clause == NULL) {
966 __ B(nested_statement.break_label());
967 } else {
968 __ B(default_clause->body_target());
969 }
970
971 // Compile all the case bodies.
972 for (int i = 0; i < clauses->length(); i++) {
973 Comment cmnt(masm_, "[ Case body");
974 CaseClause* clause = clauses->at(i);
975 __ Bind(clause->body_target());
976 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
977 VisitStatements(clause->statements());
978 }
979
980 __ Bind(nested_statement.break_label());
981 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
982}
983
984
985void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
986 ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
987 Comment cmnt(masm_, "[ ForInStatement");
988 SetStatementPosition(stmt, SKIP_BREAK);
989
990 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
991
992 // TODO(all): This visitor probably needs better comments and a revisit.
993
994  // Get the object to enumerate over.
995  SetExpressionAsStatementPosition(stmt->enumerable());
996 VisitForAccumulatorValue(stmt->enumerable());
997  OperandStackDepthIncrement(5);
998
999 Label loop, exit;
1000 Iteration loop_statement(this, stmt);
1001 increment_loop_depth();
1002
1003  // If the object is null or undefined, skip over the loop, otherwise convert
1004 // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
1005  Label convert, done_convert;
1006 __ JumpIfSmi(x0, &convert);
1007 __ JumpIfObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE, &done_convert, ge);
1008  __ JumpIfRoot(x0, Heap::kNullValueRootIndex, &exit);
1009 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
1010  __ Bind(&convert);
1011 ToObjectStub stub(isolate());
1012 __ CallStub(&stub);
1013 __ Bind(&done_convert);
1014 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1015 __ Push(x0);
1016
1017  // Check cache validity in generated code. This is a fast case for
1018 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1019 // guarantee cache validity, call the runtime system to check cache
1020 // validity or get the property names in a fixed array.
1021  // Note: Proxies never have an enum cache, so will always take the
1022 // slow path.
1023 Label call_runtime;
1024 __ CheckEnumCache(x0, x15, x10, x11, x12, x13, &call_runtime);
1025
1026 // The enum cache is valid. Load the map of the object being
1027 // iterated over and use the cache for the iteration.
1028 Label use_cache;
1029 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
1030 __ B(&use_cache);
1031
1032 // Get the set of properties to enumerate.
1033 __ Bind(&call_runtime);
1034 __ Push(x0); // Duplicate the enumerable object on the stack.
1035  __ CallRuntime(Runtime::kForInEnumerate);
1036  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1037
1038 // If we got a map from the runtime call, we can do a fast
1039 // modification check. Otherwise, we got a fixed array, and we have
1040 // to do a slow check.
1041 Label fixed_array, no_descriptors;
1042 __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
1043 __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);
1044
1045 // We got a map in register x0. Get the enumeration cache from it.
1046 __ Bind(&use_cache);
1047
1048 __ EnumLengthUntagged(x1, x0);
1049 __ Cbz(x1, &no_descriptors);
1050
1051 __ LoadInstanceDescriptors(x0, x2);
1052 __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
1053 __ Ldr(x2,
1054 FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1055
1056 // Set up the four remaining stack slots.
1057 __ SmiTag(x1);
1058 // Map, enumeration cache, enum cache length, zero (both last as smis).
1059 __ Push(x0, x2, x1, xzr);
1060 __ B(&loop);
1061
1062 __ Bind(&no_descriptors);
1063 __ Drop(1);
1064 __ B(&exit);
1065
1066 // We got a fixed array in register x0. Iterate through that.
1067 __ Bind(&fixed_array);
1068
1069  __ Mov(x1, Smi::FromInt(1));  // Smi(1) indicates slow check.
1070 __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
1071  __ Push(x1, x0, x2);  // Smi and array, fixed array length (as smi).
1072 PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS);
1073 __ Push(xzr); // Initial index.
1074
1075 // Generate code for doing the condition check.
1076 __ Bind(&loop);
1077 SetExpressionAsStatementPosition(stmt->each());
1078
1079 // Load the current count to x0, load the length to x1.
1080 __ PeekPair(x0, x1, 0);
1081 __ Cmp(x0, x1); // Compare to the array length.
1082 __ B(hs, loop_statement.break_label());
1083
1084 // Get the current entry of the array into register r3.
1085  // Get the current entry of the array into register x3.
1086 __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
1087 __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));
1088
1089 // Get the expected map from the stack or a smi in the
1090 // permanent slow case into register x10.
1091 __ Peek(x2, 3 * kXRegSize);
1092
1093 // Check if the expected map still matches that of the enumerable.
1094 // If not, we may have to filter the key.
1095 Label update_each;
1096 __ Peek(x1, 4 * kXRegSize);
1097 __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
1098 __ Cmp(x11, x2);
1099 __ B(eq, &update_each);
1100
1101  // We need to filter the key, record slow-path here.
1102 int const vector_index = SmiFromSlot(slot)->value();
1103  __ EmitLoadTypeFeedbackVector(x0);
1104 __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1105 __ Str(x10, FieldMemOperand(x0, FixedArray::OffsetOfElementAt(vector_index)));
1106
1107  // Convert the entry to a string or (smi) 0 if it isn't a property
1108 // any more. If the property has been removed while iterating, we
1109 // just skip it.
1110 __ Push(x1, x3);
1111 __ CallRuntime(Runtime::kForInFilter);
1112 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1113 __ Mov(x3, x0);
1114 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex,
1115 loop_statement.continue_label());
1116
1117 // Update the 'each' property or variable from the possibly filtered
1118 // entry in register x3.
1119 __ Bind(&update_each);
1120 __ Mov(result_register(), x3);
1121 // Perform the assignment as if via '='.
1122 { EffectContext context(this);
1123 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1124 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1125 }
1126
1127 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1128 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1129 // Generate code for the body of the loop.
1130 Visit(stmt->body());
1131
1132 // Generate code for going to the next element by incrementing
1133 // the index (smi) stored on top of the stack.
1134 __ Bind(loop_statement.continue_label());
1135 // TODO(all): We could use a callee saved register to avoid popping.
1136 __ Pop(x0);
1137 __ Add(x0, x0, Smi::FromInt(1));
1138 __ Push(x0);
1139
1140 EmitBackEdgeBookkeeping(stmt, &loop);
1141 __ B(&loop);
1142
1143 // Remove the pointers stored on the stack.
1144 __ Bind(loop_statement.break_label());
1145  DropOperands(5);
1146
1147 // Exit and decrement the loop depth.
1148 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1149 __ Bind(&exit);
1150 decrement_loop_depth();
1151}
1152
1153
1154void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1155 FeedbackVectorSlot slot) {
1156 DCHECK(NeedsHomeObject(initializer));
1157 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1158 __ Mov(StoreDescriptor::NameRegister(),
1159 Operand(isolate()->factory()->home_object_symbol()));
1160 __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
1161 EmitLoadStoreICSlot(slot);
1162 CallStoreIC();
1163}
1164
1165
1166void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1167 int offset,
1168 FeedbackVectorSlot slot) {
1169 DCHECK(NeedsHomeObject(initializer));
1170 __ Move(StoreDescriptor::ReceiverRegister(), x0);
1171 __ Mov(StoreDescriptor::NameRegister(),
1172 Operand(isolate()->factory()->home_object_symbol()));
1173 __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
1174 EmitLoadStoreICSlot(slot);
1175 CallStoreIC();
1176}
1177
1178
1179void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1180 TypeofMode typeof_mode,
1181 Label* slow) {
1182 Register current = cp;
1183 Register next = x10;
1184 Register temp = x11;
1185
1186 Scope* s = scope();
1187 while (s != NULL) {
1188 if (s->num_heap_slots() > 0) {
1189 if (s->calls_sloppy_eval()) {
1190 // Check that extension is "the hole".
1191 __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1192 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1193 }
1194 // Load next context in chain.
1195 __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1196 // Walk the rest of the chain without clobbering cp.
1197 current = next;
1198 }
1199 // If no outer scope calls eval, we do not need to check more
1200 // context extensions.
1201 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1202 s = s->outer_scope();
1203 }
1204
1205 if (s->is_eval_scope()) {
1206 Label loop, fast;
1207 __ Mov(next, current);
1208
1209 __ Bind(&loop);
1210 // Terminate at native context.
1211 __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1212 __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
1213 // Check that extension is "the hole".
1214 __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1215 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1216 // Load next context in chain.
1217 __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1218 __ B(&loop);
1219 __ Bind(&fast);
1220 }
1221
1222 // All extension objects were empty and it is safe to use a normal global
1223 // load machinery.
1224 EmitGlobalVariableLoad(proxy, typeof_mode);
1225}
1226
1227
1228MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1229 Label* slow) {
1230 DCHECK(var->IsContextSlot());
1231 Register context = cp;
1232 Register next = x10;
1233 Register temp = x11;
1234
1235 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1236 if (s->num_heap_slots() > 0) {
1237 if (s->calls_sloppy_eval()) {
1238 // Check that extension is "the hole".
1239 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1240 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1241 }
1242 __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1243 // Walk the rest of the chain without clobbering cp.
1244 context = next;
1245 }
1246 }
1247 // Check that last extension is "the hole".
1248 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1249 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1250
1251 // This function is used only for loads, not stores, so it's safe to
1252 // return an cp-based operand (the write barrier cannot be allowed to
1253 // destroy the cp register).
1254 return ContextMemOperand(context, var->index());
1255}
1256
1257
1258void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1259 TypeofMode typeof_mode,
1260 Label* slow, Label* done) {
1261 // Generate fast-case code for variables that might be shadowed by
1262 // eval-introduced variables. Eval is used a lot without
1263 // introducing variables. In those cases, we do not want to
1264 // perform a runtime call for all variables in the scope
1265 // containing the eval.
1266 Variable* var = proxy->var();
1267 if (var->mode() == DYNAMIC_GLOBAL) {
1268 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1269 __ B(done);
1270 } else if (var->mode() == DYNAMIC_LOCAL) {
1271 Variable* local = var->local_if_not_shadowed();
1272 __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
1273 if (local->mode() == LET || local->mode() == CONST ||
1274 local->mode() == CONST_LEGACY) {
1275 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
1276 if (local->mode() == CONST_LEGACY) {
1277 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1278 } else { // LET || CONST
1279 __ Mov(x0, Operand(var->name()));
1280 __ Push(x0);
1281 __ CallRuntime(Runtime::kThrowReferenceError);
1282 }
1283 }
1284 __ B(done);
1285 }
1286}
1287
1288
1289void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1290 TypeofMode typeof_mode) {
1291 Variable* var = proxy->var();
1292 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1293 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1294 __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
1295 __ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1296 __ Mov(LoadDescriptor::SlotRegister(),
1297 SmiFromSlot(proxy->VariableFeedbackSlot()));
1298 CallLoadIC(typeof_mode);
1299}
1300
1301
1302void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1303 TypeofMode typeof_mode) {
1304 // Record position before possible IC call.
1305 SetExpressionPosition(proxy);
1306 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1307 Variable* var = proxy->var();
1308
1309 // Three cases: global variables, lookup variables, and all other types of
1310 // variables.
1311 switch (var->location()) {
1312 case VariableLocation::GLOBAL:
1313 case VariableLocation::UNALLOCATED: {
1314 Comment cmnt(masm_, "Global variable");
1315 EmitGlobalVariableLoad(proxy, typeof_mode);
1316 context()->Plug(x0);
1317 break;
1318 }
1319
1320 case VariableLocation::PARAMETER:
1321 case VariableLocation::LOCAL:
1322 case VariableLocation::CONTEXT: {
1323 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1324 Comment cmnt(masm_, var->IsContextSlot()
1325 ? "Context variable"
1326 : "Stack variable");
1327 if (NeedsHoleCheckForLoad(proxy)) {
1328 // Let and const need a read barrier.
1329 GetVar(x0, var);
1330 Label done;
1331 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
1332 if (var->mode() == LET || var->mode() == CONST) {
1333 // Throw a reference error when using an uninitialized let/const
1334 // binding in harmony mode.
1335 __ Mov(x0, Operand(var->name()));
1336 __ Push(x0);
1337 __ CallRuntime(Runtime::kThrowReferenceError);
1338 __ Bind(&done);
1339 } else {
1340 // Uninitialized legacy const bindings are unholed.
1341 DCHECK(var->mode() == CONST_LEGACY);
1342 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1343 __ Bind(&done);
1344 }
1345 context()->Plug(x0);
1346 break;
1347 }
1348 context()->Plug(var);
1349 break;
1350 }
1351
1352 case VariableLocation::LOOKUP: {
1353 Label done, slow;
1354 // Generate code for loading from variables potentially shadowed by
1355 // eval-introduced variables.
1356 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1357 __ Bind(&slow);
1358 Comment cmnt(masm_, "Lookup variable");
1359      __ Push(var->name());
1360      Runtime::FunctionId function_id =
1361 typeof_mode == NOT_INSIDE_TYPEOF
1362 ? Runtime::kLoadLookupSlot
1363              : Runtime::kLoadLookupSlotInsideTypeof;
1364      __ CallRuntime(function_id);
1365 __ Bind(&done);
1366 context()->Plug(x0);
1367 break;
1368 }
1369 }
1370}
1371
1372
1373void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1374 Comment cmnt(masm_, "[ RegExpLiteral");
1375 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1376 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1377 __ Mov(x1, Operand(expr->pattern()));
1378 __ Mov(x0, Smi::FromInt(expr->flags()));
1379 FastCloneRegExpStub stub(isolate());
1380 __ CallStub(&stub);
1381 context()->Plug(x0);
1382}
1383
1384
1385void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1386 Expression* expression = (property == NULL) ? NULL : property->value();
1387 if (expression == NULL) {
1388 __ LoadRoot(x10, Heap::kNullValueRootIndex);
1389    PushOperand(x10);
1390  } else {
1391 VisitForStackValue(expression);
1392 if (NeedsHomeObject(expression)) {
1393 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1394 property->kind() == ObjectLiteral::Property::SETTER);
1395 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1396 EmitSetHomeObject(expression, offset, property->GetSlot());
1397 }
1398 }
1399}
1400
1401
1402void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1403 Comment cmnt(masm_, "[ ObjectLiteral");
1404
1405 Handle<FixedArray> constant_properties = expr->constant_properties();
1406 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1407 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1408 __ Mov(x1, Operand(constant_properties));
1409 int flags = expr->ComputeFlags();
1410 __ Mov(x0, Smi::FromInt(flags));
1411 if (MustCreateObjectLiteralWithRuntime(expr)) {
1412 __ Push(x3, x2, x1, x0);
1413 __ CallRuntime(Runtime::kCreateObjectLiteral);
1414 } else {
1415 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1416 __ CallStub(&stub);
1417 }
1418 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1419
1420 // If result_saved is true the result is on top of the stack. If
1421 // result_saved is false the result is in x0.
1422 bool result_saved = false;
1423
1424 AccessorTable accessor_table(zone());
1425 int property_index = 0;
1426 for (; property_index < expr->properties()->length(); property_index++) {
1427 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1428 if (property->is_computed_name()) break;
1429 if (property->IsCompileTimeValue()) continue;
1430
1431 Literal* key = property->key()->AsLiteral();
1432 Expression* value = property->value();
1433 if (!result_saved) {
1434      PushOperand(x0);  // Save result on stack
1435      result_saved = true;
1436 }
1437 switch (property->kind()) {
1438 case ObjectLiteral::Property::CONSTANT:
1439 UNREACHABLE();
1440 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1441 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1442 // Fall through.
1443 case ObjectLiteral::Property::COMPUTED:
1444 // It is safe to use [[Put]] here because the boilerplate already
1445 // contains computed properties with an uninitialized value.
1446 if (key->value()->IsInternalizedString()) {
1447 if (property->emit_store()) {
1448 VisitForAccumulatorValue(value);
1449 DCHECK(StoreDescriptor::ValueRegister().is(x0));
1450 __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1451 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1452 EmitLoadStoreICSlot(property->GetSlot(0));
1453 CallStoreIC();
1454 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1455
1456 if (NeedsHomeObject(value)) {
1457 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1458 }
1459 } else {
1460 VisitForEffect(value);
1461 }
1462 break;
1463 }
1464 __ Peek(x0, 0);
1465        PushOperand(x0);
1466        VisitForStackValue(key);
1467 VisitForStackValue(value);
1468 if (property->emit_store()) {
1469 if (NeedsHomeObject(value)) {
1470 EmitSetHomeObject(value, 2, property->GetSlot());
1471 }
1472 __ Mov(x0, Smi::FromInt(SLOPPY)); // Language mode
1473          PushOperand(x0);
1474 CallRuntimeWithOperands(Runtime::kSetProperty);
1475        } else {
1476          DropOperands(3);
1477        }
1478 break;
1479 case ObjectLiteral::Property::PROTOTYPE:
1480 DCHECK(property->emit_store());
1481 // Duplicate receiver on stack.
1482 __ Peek(x0, 0);
1483        PushOperand(x0);
1484        VisitForStackValue(value);
1485        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1486        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1487 NO_REGISTERS);
1488 break;
1489 case ObjectLiteral::Property::GETTER:
1490 if (property->emit_store()) {
1491 accessor_table.lookup(key)->second->getter = property;
1492 }
1493 break;
1494 case ObjectLiteral::Property::SETTER:
1495 if (property->emit_store()) {
1496 accessor_table.lookup(key)->second->setter = property;
1497 }
1498 break;
1499 }
1500 }
1501
1502 // Emit code to define accessors, using only a single call to the runtime for
1503 // each pair of corresponding getters and setters.
1504 for (AccessorTable::Iterator it = accessor_table.begin();
1505 it != accessor_table.end();
1506 ++it) {
1507 __ Peek(x10, 0); // Duplicate receiver.
1508    PushOperand(x10);
1509    VisitForStackValue(it->first);
1510 EmitAccessor(it->second->getter);
1511 EmitAccessor(it->second->setter);
1512 __ Mov(x10, Smi::FromInt(NONE));
1513    PushOperand(x10);
1514 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001515 }
1516
1517 // Object literals have two parts. The "static" part on the left contains no
1518 // computed property names, and so we can compute its map ahead of time; see
1519 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1520 // starts with the first computed property name, and continues with all
1521 // properties to its right. All the code from above initializes the static
1522 // component of the object literal, and arranges for the map of the result to
1523 // reflect the static order in which the keys appear. For the dynamic
1524 // properties, we compile them into a series of "SetOwnProperty" runtime
1525 // calls. This will preserve insertion order.
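// For example, in { a: 1, [f()]: 2, b: 3 } the property 'a' belongs to the
// static part handled above, while [f()] and the trailing 'b' form the
// dynamic part defined by the runtime calls below.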
1526 for (; property_index < expr->properties()->length(); property_index++) {
1527 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1528
1529 Expression* value = property->value();
1530 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001531 PushOperand(x0); // Save result on stack
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001532 result_saved = true;
1533 }
1534
1535 __ Peek(x10, 0); // Duplicate receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001536 PushOperand(x10);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001537
1538 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1539 DCHECK(!property->is_computed_name());
1540 VisitForStackValue(value);
1541 DCHECK(property->emit_store());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001542 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001543 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1544 NO_REGISTERS);
1545 } else {
1546 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1547 VisitForStackValue(value);
1548 if (NeedsHomeObject(value)) {
1549 EmitSetHomeObject(value, 2, property->GetSlot());
1550 }
1551
1552 switch (property->kind()) {
1553 case ObjectLiteral::Property::CONSTANT:
1554 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1555 case ObjectLiteral::Property::COMPUTED:
1556 if (property->emit_store()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001557 PushOperand(Smi::FromInt(NONE));
1558 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1559 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001560 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001561 DropOperands(3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001562 }
1563 break;
1564
1565 case ObjectLiteral::Property::PROTOTYPE:
1566 UNREACHABLE();
1567 break;
1568
1569 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001570 PushOperand(Smi::FromInt(NONE));
1571 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001572 break;
1573
1574 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001575 PushOperand(Smi::FromInt(NONE));
1576 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001577 break;
1578 }
1579 }
1580 }
1581
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001582 if (result_saved) {
1583 context()->PlugTOS();
1584 } else {
1585 context()->Plug(x0);
1586 }
1587}
1588
1589
1590void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1591 Comment cmnt(masm_, "[ ArrayLiteral");
1592
1593 Handle<FixedArray> constant_elements = expr->constant_elements();
1594 bool has_fast_elements =
1595 IsFastObjectElementsKind(expr->constant_elements_kind());
1596
1597 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1598 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1599 // If the only customer of allocation sites is element-kind transitioning,
1600 // we can turn allocation-site tracking off when there is nowhere left to
1600 // transition to.
1601 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1602 }
1603
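// The literal is created from the closure (x3), the literal index (x2) and
// the constant elements (x1); the runtime path additionally passes the
// literal flags in x0.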
1604 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1605 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1606 __ Mov(x1, Operand(constant_elements));
1607 if (MustCreateArrayLiteralWithRuntime(expr)) {
1608 __ Mov(x0, Smi::FromInt(expr->ComputeFlags()));
1609 __ Push(x3, x2, x1, x0);
1610 __ CallRuntime(Runtime::kCreateArrayLiteral);
1611 } else {
1612 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1613 __ CallStub(&stub);
1614 }
1615 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1616
1617 bool result_saved = false; // Is the result saved to the stack?
1618 ZoneList<Expression*>* subexprs = expr->values();
1619 int length = subexprs->length();
1620
1621 // Emit code to evaluate all the non-constant subexpressions and to store
1622 // them into the newly cloned array.
1623 int array_index = 0;
1624 for (; array_index < length; array_index++) {
1625 Expression* subexpr = subexprs->at(array_index);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001626 DCHECK(!subexpr->IsSpread());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001627
1628 // If the subexpression is a literal or a simple materialized literal it
1629 // is already set in the cloned array.
1630 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1631
1632 if (!result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001633 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001634 result_saved = true;
1635 }
1636 VisitForAccumulatorValue(subexpr);
1637
1638 __ Mov(StoreDescriptor::NameRegister(), Smi::FromInt(array_index));
1639 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1640 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1641 Handle<Code> ic =
1642 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1643 CallIC(ic);
1644
1645 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1646 }
1647
1648 // In case the array literal contains spread expressions it has two parts. The
1649 // first part is the "static" array, which has a literal index and is handled
1650 // above. The second part starts at the first spread expression (inclusive),
1651 // and its elements get appended to the array. Note that the number of
1652 // elements an iterable produces is not known ahead of time.
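// Elements handled above are stored at statically known indices; anything
// remaining is appended one by one through Runtime::kAppendElement, which
// keeps insertion order even though the final element count is unknown.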
1653 if (array_index < length && result_saved) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001654 PopOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001655 result_saved = false;
1656 }
1657 for (; array_index < length; array_index++) {
1658 Expression* subexpr = subexprs->at(array_index);
1659
Ben Murdoch097c5b22016-05-18 11:27:45 +01001660 PushOperand(x0);
1661 DCHECK(!subexpr->IsSpread());
1662 VisitForStackValue(subexpr);
1663 CallRuntimeWithOperands(Runtime::kAppendElement);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001664
1665 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1666 }
1667
1668 if (result_saved) {
1669 context()->PlugTOS();
1670 } else {
1671 context()->Plug(x0);
1672 }
1673}
1674
1675
1676void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1677 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1678
1679 Comment cmnt(masm_, "[ Assignment");
1680 SetExpressionPosition(expr, INSERT_BREAK);
1681
1682 Property* property = expr->target()->AsProperty();
1683 LhsKind assign_type = Property::GetAssignType(property);
1684
1685 // Evaluate LHS expression.
1686 switch (assign_type) {
1687 case VARIABLE:
1688 // Nothing to do here.
1689 break;
1690 case NAMED_PROPERTY:
1691 if (expr->is_compound()) {
1692 // We need the receiver both on the stack and in the register.
1693 VisitForStackValue(property->obj());
1694 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
1695 } else {
1696 VisitForStackValue(property->obj());
1697 }
1698 break;
1699 case NAMED_SUPER_PROPERTY:
1700 VisitForStackValue(
1701 property->obj()->AsSuperPropertyReference()->this_var());
1702 VisitForAccumulatorValue(
1703 property->obj()->AsSuperPropertyReference()->home_object());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001704 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001705 if (expr->is_compound()) {
1706 const Register scratch = x10;
1707 __ Peek(scratch, kPointerSize);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001708 PushOperands(scratch, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001709 }
1710 break;
1711 case KEYED_SUPER_PROPERTY:
1712 VisitForStackValue(
1713 property->obj()->AsSuperPropertyReference()->this_var());
1714 VisitForStackValue(
1715 property->obj()->AsSuperPropertyReference()->home_object());
1716 VisitForAccumulatorValue(property->key());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001717 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001718 if (expr->is_compound()) {
1719 const Register scratch1 = x10;
1720 const Register scratch2 = x11;
1721 __ Peek(scratch1, 2 * kPointerSize);
1722 __ Peek(scratch2, kPointerSize);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001723 PushOperands(scratch1, scratch2, result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001724 }
1725 break;
1726 case KEYED_PROPERTY:
1727 if (expr->is_compound()) {
1728 VisitForStackValue(property->obj());
1729 VisitForStackValue(property->key());
1730 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
1731 __ Peek(LoadDescriptor::NameRegister(), 0);
1732 } else {
1733 VisitForStackValue(property->obj());
1734 VisitForStackValue(property->key());
1735 }
1736 break;
1737 }
1738
1739 // For compound assignments we need another deoptimization point after the
1740 // variable/property load.
1741 if (expr->is_compound()) {
1742 { AccumulatorValueContext context(this);
1743 switch (assign_type) {
1744 case VARIABLE:
1745 EmitVariableLoad(expr->target()->AsVariableProxy());
1746 PrepareForBailout(expr->target(), TOS_REG);
1747 break;
1748 case NAMED_PROPERTY:
1749 EmitNamedPropertyLoad(property);
1750 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1751 break;
1752 case NAMED_SUPER_PROPERTY:
1753 EmitNamedSuperPropertyLoad(property);
1754 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1755 break;
1756 case KEYED_SUPER_PROPERTY:
1757 EmitKeyedSuperPropertyLoad(property);
1758 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1759 break;
1760 case KEYED_PROPERTY:
1761 EmitKeyedPropertyLoad(property);
1762 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1763 break;
1764 }
1765 }
1766
1767 Token::Value op = expr->binary_op();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001768 PushOperand(x0); // Left operand goes on the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001769 VisitForAccumulatorValue(expr->value());
1770
1771 AccumulatorValueContext context(this);
1772 if (ShouldInlineSmiCase(op)) {
1773 EmitInlineSmiBinaryOp(expr->binary_operation(),
1774 op,
1775 expr->target(),
1776 expr->value());
1777 } else {
1778 EmitBinaryOp(expr->binary_operation(), op);
1779 }
1780
1781 // Deoptimization point in case the binary operation may have side effects.
1782 PrepareForBailout(expr->binary_operation(), TOS_REG);
1783 } else {
1784 VisitForAccumulatorValue(expr->value());
1785 }
1786
1787 SetExpressionPosition(expr);
1788
1789 // Store the value.
1790 switch (assign_type) {
1791 case VARIABLE:
1792 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1793 expr->op(), expr->AssignmentSlot());
1794 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1795 context()->Plug(x0);
1796 break;
1797 case NAMED_PROPERTY:
1798 EmitNamedPropertyAssignment(expr);
1799 break;
1800 case NAMED_SUPER_PROPERTY:
1801 EmitNamedSuperPropertyStore(property);
1802 context()->Plug(x0);
1803 break;
1804 case KEYED_SUPER_PROPERTY:
1805 EmitKeyedSuperPropertyStore(property);
1806 context()->Plug(x0);
1807 break;
1808 case KEYED_PROPERTY:
1809 EmitKeyedPropertyAssignment(expr);
1810 break;
1811 }
1812}
1813
1814
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001815void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1816 Token::Value op,
1817 Expression* left_expr,
1818 Expression* right_expr) {
1819 Label done, both_smis, stub_call;
1820
1821 // Get the arguments.
1822 Register left = x1;
1823 Register right = x0;
1824 Register result = x0;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001825 PopOperand(left);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001826
1827 // Perform combined smi check on both operands.
1828 __ Orr(x10, left, right);
1829 JumpPatchSite patch_site(masm_);
1830 patch_site.EmitJumpIfSmi(x10, &both_smis);
1831
1832 __ Bind(&stub_call);
1833
Ben Murdoch097c5b22016-05-18 11:27:45 +01001834 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001835 {
1836 Assembler::BlockPoolsScope scope(masm_);
1837 CallIC(code, expr->BinaryOperationFeedbackId());
1838 patch_site.EmitPatchInfo();
1839 }
1840 __ B(&done);
1841
1842 __ Bind(&both_smis);
1843 // Smi case. This code works in the same way as the smi-smi case in the type
1844 // recording binary operation stub, see
1845 // BinaryOpStub::GenerateSmiSmiOperation for comments.
1846 // TODO(all): That doesn't exist any more. Where are the comments?
1847 //
1848 // The set of operations that needs to be supported here is controlled by
1849 // FullCodeGenerator::ShouldInlineSmiCase().
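// On arm64 a smi stores its 32-bit payload in the upper half of the register
// (kSmiShift == 32, kSmiTag == 0), so additive and bitwise operations can be
// performed directly on the tagged values, while shift counts are extracted
// from the tagged right operand with Ubfx.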
1850 switch (op) {
1851 case Token::SAR:
1852 __ Ubfx(right, right, kSmiShift, 5);
1853 __ Asr(result, left, right);
1854 __ Bic(result, result, kSmiShiftMask);
1855 break;
1856 case Token::SHL:
1857 __ Ubfx(right, right, kSmiShift, 5);
1858 __ Lsl(result, left, right);
1859 break;
1860 case Token::SHR:
1861 // If `left >>> right` >= 0x80000000, the result is not representable in a
1862 // signed 32-bit smi.
1863 __ Ubfx(right, right, kSmiShift, 5);
1864 __ Lsr(x10, left, right);
1865 __ Tbnz(x10, kXSignBit, &stub_call);
1866 __ Bic(result, x10, kSmiShiftMask);
1867 break;
1868 case Token::ADD:
1869 __ Adds(x10, left, right);
1870 __ B(vs, &stub_call);
1871 __ Mov(result, x10);
1872 break;
1873 case Token::SUB:
1874 __ Subs(x10, left, right);
1875 __ B(vs, &stub_call);
1876 __ Mov(result, x10);
1877 break;
1878 case Token::MUL: {
1879 Label not_minus_zero, done;
1880 STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
1881 STATIC_ASSERT(kSmiTag == 0);
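// Both operands are tagged as (value << 32), so the 128-bit product is
// (left * right) << 64 and Smulh's high half is exactly the untagged product.
// For example 3 * 5: Smulh yields 15, Cls(15) = 59 >= 32, so SmiTag re-tags
// it; a zero product still needs the minus-zero check below.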
1882 __ Smulh(x10, left, right);
1883 __ Cbnz(x10, &not_minus_zero);
1884 __ Eor(x11, left, right);
1885 __ Tbnz(x11, kXSignBit, &stub_call);
1886 __ Mov(result, x10);
1887 __ B(&done);
1888 __ Bind(&not_minus_zero);
1889 __ Cls(x11, x10);
1890 __ Cmp(x11, kXRegSizeInBits - kSmiShift);
1891 __ B(lt, &stub_call);
1892 __ SmiTag(result, x10);
1893 __ Bind(&done);
1894 break;
1895 }
1896 case Token::BIT_OR:
1897 __ Orr(result, left, right);
1898 break;
1899 case Token::BIT_AND:
1900 __ And(result, left, right);
1901 break;
1902 case Token::BIT_XOR:
1903 __ Eor(result, left, right);
1904 break;
1905 default:
1906 UNREACHABLE();
1907 }
1908
1909 __ Bind(&done);
1910 context()->Plug(x0);
1911}
1912
1913
1914void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001915 PopOperand(x1);
1916 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001917 JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
1918 {
1919 Assembler::BlockPoolsScope scope(masm_);
1920 CallIC(code, expr->BinaryOperationFeedbackId());
1921 patch_site.EmitPatchInfo();
1922 }
1923 context()->Plug(x0);
1924}
1925
1926
1927void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001928 for (int i = 0; i < lit->properties()->length(); i++) {
1929 ObjectLiteral::Property* property = lit->properties()->at(i);
1930 Expression* value = property->value();
1931
Ben Murdoch097c5b22016-05-18 11:27:45 +01001932 Register scratch = x1;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001933 if (property->is_static()) {
1934 __ Peek(scratch, kPointerSize); // constructor
1935 } else {
1936 __ Peek(scratch, 0); // prototype
1937 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001938 PushOperand(scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001939 EmitPropertyKey(property, lit->GetIdForProperty(i));
1940
1941 // The static prototype property is read-only. We handle the non-computed
1942 // property name case in the parser. Since this is the only case where we
1943 // need to check for an own read-only property, we special-case it here so
1944 // we do not need to do the check for every property.
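// For example, class C { static ['proto' + 'type']() {} } can only be
// rejected at runtime, which is what kThrowIfStaticPrototype checks here.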
1945 if (property->is_static() && property->is_computed_name()) {
1946 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
1947 __ Push(x0);
1948 }
1949
1950 VisitForStackValue(value);
1951 if (NeedsHomeObject(value)) {
1952 EmitSetHomeObject(value, 2, property->GetSlot());
1953 }
1954
1955 switch (property->kind()) {
1956 case ObjectLiteral::Property::CONSTANT:
1957 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1958 case ObjectLiteral::Property::PROTOTYPE:
1959 UNREACHABLE();
1960 case ObjectLiteral::Property::COMPUTED:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001961 PushOperand(Smi::FromInt(DONT_ENUM));
1962 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1963 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001964 break;
1965
1966 case ObjectLiteral::Property::GETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001967 PushOperand(Smi::FromInt(DONT_ENUM));
1968 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001969 break;
1970
1971 case ObjectLiteral::Property::SETTER:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001972 PushOperand(Smi::FromInt(DONT_ENUM));
1973 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001974 break;
1975
1976 default:
1977 UNREACHABLE();
1978 }
1979 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001980}
1981
1982
1983void FullCodeGenerator::EmitAssignment(Expression* expr,
1984 FeedbackVectorSlot slot) {
1985 DCHECK(expr->IsValidReferenceExpressionOrThis());
1986
1987 Property* prop = expr->AsProperty();
1988 LhsKind assign_type = Property::GetAssignType(prop);
1989
1990 switch (assign_type) {
1991 case VARIABLE: {
1992 Variable* var = expr->AsVariableProxy()->var();
1993 EffectContext context(this);
1994 EmitVariableAssignment(var, Token::ASSIGN, slot);
1995 break;
1996 }
1997 case NAMED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001998 PushOperand(x0); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001999 VisitForAccumulatorValue(prop->obj());
2000 // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
2001 // this copy.
2002 __ Mov(StoreDescriptor::ReceiverRegister(), x0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002003 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002004 __ Mov(StoreDescriptor::NameRegister(),
2005 Operand(prop->key()->AsLiteral()->value()));
2006 EmitLoadStoreICSlot(slot);
2007 CallStoreIC();
2008 break;
2009 }
2010 case NAMED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002011 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002012 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2013 VisitForAccumulatorValue(
2014 prop->obj()->AsSuperPropertyReference()->home_object());
2015 // stack: value, this; x0: home_object
2016 Register scratch = x10;
2017 Register scratch2 = x11;
2018 __ mov(scratch, result_register()); // home_object
2019 __ Peek(x0, kPointerSize); // value
2020 __ Peek(scratch2, 0); // this
2021 __ Poke(scratch2, kPointerSize); // this
2022 __ Poke(scratch, 0); // home_object
2023 // stack: this, home_object; x0: value
2024 EmitNamedSuperPropertyStore(prop);
2025 break;
2026 }
2027 case KEYED_SUPER_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002028 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002029 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2030 VisitForStackValue(
2031 prop->obj()->AsSuperPropertyReference()->home_object());
2032 VisitForAccumulatorValue(prop->key());
2033 Register scratch = x10;
2034 Register scratch2 = x11;
2035 __ Peek(scratch2, 2 * kPointerSize); // value
2036 // stack: value, this, home_object; x0: key, x11: value
2037 __ Peek(scratch, kPointerSize); // this
2038 __ Poke(scratch, 2 * kPointerSize);
2039 __ Peek(scratch, 0); // home_object
2040 __ Poke(scratch, kPointerSize);
2041 __ Poke(x0, 0);
2042 __ Move(x0, scratch2);
2043 // stack: this, home_object, key; x0: value.
2044 EmitKeyedSuperPropertyStore(prop);
2045 break;
2046 }
2047 case KEYED_PROPERTY: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01002048 PushOperand(x0); // Preserve value.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002049 VisitForStackValue(prop->obj());
2050 VisitForAccumulatorValue(prop->key());
2051 __ Mov(StoreDescriptor::NameRegister(), x0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002052 PopOperands(StoreDescriptor::ReceiverRegister(),
2053 StoreDescriptor::ValueRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002054 EmitLoadStoreICSlot(slot);
2055 Handle<Code> ic =
2056 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2057 CallIC(ic);
2058 break;
2059 }
2060 }
2061 context()->Plug(x0);
2062}
2063
2064
2065void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2066 Variable* var, MemOperand location) {
2067 __ Str(result_register(), location);
2068 if (var->IsContextSlot()) {
2069 // RecordWrite may destroy all its register arguments.
2070 __ Mov(x10, result_register());
2071 int offset = Context::SlotOffset(var->index());
2072 __ RecordWriteContextSlot(
2073 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
2074 }
2075}
2076
2077
2078void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2079 FeedbackVectorSlot slot) {
2080 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2081 if (var->IsUnallocated()) {
2082 // Global var, const, or let.
2083 __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2084 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2085 EmitLoadStoreICSlot(slot);
2086 CallStoreIC();
2087
2088 } else if (var->mode() == LET && op != Token::INIT) {
2089 // Non-initializing assignment to let variable needs a write barrier.
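// A hole value means the 'let' declaration has not been executed yet
// (e.g. `{ x = 1; let x; }`), so the assignment must throw a ReferenceError
// instead of storing.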
2090 DCHECK(!var->IsLookupSlot());
2091 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2092 Label assign;
2093 MemOperand location = VarOperand(var, x1);
2094 __ Ldr(x10, location);
2095 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
2096 __ Mov(x10, Operand(var->name()));
2097 __ Push(x10);
2098 __ CallRuntime(Runtime::kThrowReferenceError);
2099 // Perform the assignment.
2100 __ Bind(&assign);
2101 EmitStoreToStackLocalOrContextSlot(var, location);
2102
2103 } else if (var->mode() == CONST && op != Token::INIT) {
2104 // Assignment to const variable needs a write barrier.
2105 DCHECK(!var->IsLookupSlot());
2106 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2107 Label const_error;
2108 MemOperand location = VarOperand(var, x1);
2109 __ Ldr(x10, location);
2110 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &const_error);
2111 __ Mov(x10, Operand(var->name()));
2112 __ Push(x10);
2113 __ CallRuntime(Runtime::kThrowReferenceError);
2114 __ Bind(&const_error);
2115 __ CallRuntime(Runtime::kThrowConstAssignError);
2116
2117 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2118 // Initializing assignment to const {this} needs a write barrier.
2119 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2120 Label uninitialized_this;
2121 MemOperand location = VarOperand(var, x1);
2122 __ Ldr(x10, location);
2123 __ JumpIfRoot(x10, Heap::kTheHoleValueRootIndex, &uninitialized_this);
2124 __ Mov(x0, Operand(var->name()));
2125 __ Push(x0);
2126 __ CallRuntime(Runtime::kThrowReferenceError);
2127 __ bind(&uninitialized_this);
2128 EmitStoreToStackLocalOrContextSlot(var, location);
2129
2130 } else if (!var->is_const_mode() ||
2131 (var->mode() == CONST && op == Token::INIT)) {
2132 if (var->IsLookupSlot()) {
2133 // Assignment to var.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002134 __ Push(var->name());
2135 __ Push(x0);
2136 __ CallRuntime(is_strict(language_mode())
2137 ? Runtime::kStoreLookupSlot_Strict
2138 : Runtime::kStoreLookupSlot_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002139 } else {
2140 // Assignment to var or initializing assignment to let/const in harmony
2141 // mode.
2142 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2143 MemOperand location = VarOperand(var, x1);
2144 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
2145 __ Ldr(x10, location);
2146 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
2147 __ Check(eq, kLetBindingReInitialization);
2148 }
2149 EmitStoreToStackLocalOrContextSlot(var, location);
2150 }
2151
2152 } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
2153 // Const initializers need a write barrier.
2154 DCHECK(!var->IsParameter()); // No const parameters.
2155 if (var->IsLookupSlot()) {
2156 __ Mov(x1, Operand(var->name()));
2157 __ Push(x0, cp, x1);
2158 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
2159 } else {
2160 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2161 Label skip;
2162 MemOperand location = VarOperand(var, x1);
2163 __ Ldr(x10, location);
2164 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
2165 EmitStoreToStackLocalOrContextSlot(var, location);
2166 __ Bind(&skip);
2167 }
2168
2169 } else {
2170 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2171 if (is_strict(language_mode())) {
2172 __ CallRuntime(Runtime::kThrowConstAssignError);
2173 }
2174 // Silently ignore store in sloppy mode.
2175 }
2176}
2177
2178
2179void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2180 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2181 // Assignment to a property, using a named store IC.
2182 Property* prop = expr->target()->AsProperty();
2183 DCHECK(prop != NULL);
2184 DCHECK(prop->key()->IsLiteral());
2185
2186 __ Mov(StoreDescriptor::NameRegister(),
2187 Operand(prop->key()->AsLiteral()->value()));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002188 PopOperand(StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002189 EmitLoadStoreICSlot(expr->AssignmentSlot());
2190 CallStoreIC();
2191
2192 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2193 context()->Plug(x0);
2194}
2195
2196
2197void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2198 // Assignment to named property of super.
2199 // x0 : value
2200 // stack : receiver ('this'), home_object
2201 DCHECK(prop != NULL);
2202 Literal* key = prop->key()->AsLiteral();
2203 DCHECK(key != NULL);
2204
Ben Murdoch097c5b22016-05-18 11:27:45 +01002205 PushOperand(key->value());
2206 PushOperand(x0);
2207 CallRuntimeWithOperands(is_strict(language_mode())
2208 ? Runtime::kStoreToSuper_Strict
2209 : Runtime::kStoreToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002210}
2211
2212
2213void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2214 // Assignment to keyed property of super.
2215 // x0 : value
2216 // stack : receiver ('this'), home_object, key
2217 DCHECK(prop != NULL);
2218
Ben Murdoch097c5b22016-05-18 11:27:45 +01002219 PushOperand(x0);
2220 CallRuntimeWithOperands(is_strict(language_mode())
2221 ? Runtime::kStoreKeyedToSuper_Strict
2222 : Runtime::kStoreKeyedToSuper_Sloppy);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002223}
2224
2225
2226void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2227 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2228 // Assignment to a property, using a keyed store IC.
2229
2230 // TODO(all): Could we pass this in registers rather than on the stack?
Ben Murdoch097c5b22016-05-18 11:27:45 +01002231 PopOperands(StoreDescriptor::NameRegister(),
2232 StoreDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002233 DCHECK(StoreDescriptor::ValueRegister().is(x0));
2234
2235 Handle<Code> ic =
2236 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2237 EmitLoadStoreICSlot(expr->AssignmentSlot());
2238 CallIC(ic);
2239
2240 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2241 context()->Plug(x0);
2242}
2243
2244
2245void FullCodeGenerator::VisitProperty(Property* expr) {
2246 Comment cmnt(masm_, "[ Property");
2247 SetExpressionPosition(expr);
2248 Expression* key = expr->key();
2249
2250 if (key->IsPropertyName()) {
2251 if (!expr->IsSuperAccess()) {
2252 VisitForAccumulatorValue(expr->obj());
2253 __ Move(LoadDescriptor::ReceiverRegister(), x0);
2254 EmitNamedPropertyLoad(expr);
2255 } else {
2256 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2257 VisitForStackValue(
2258 expr->obj()->AsSuperPropertyReference()->home_object());
2259 EmitNamedSuperPropertyLoad(expr);
2260 }
2261 } else {
2262 if (!expr->IsSuperAccess()) {
2263 VisitForStackValue(expr->obj());
2264 VisitForAccumulatorValue(expr->key());
2265 __ Move(LoadDescriptor::NameRegister(), x0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002266 PopOperand(LoadDescriptor::ReceiverRegister());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002267 EmitKeyedPropertyLoad(expr);
2268 } else {
2269 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2270 VisitForStackValue(
2271 expr->obj()->AsSuperPropertyReference()->home_object());
2272 VisitForStackValue(expr->key());
2273 EmitKeyedSuperPropertyLoad(expr);
2274 }
2275 }
2276 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2277 context()->Plug(x0);
2278}
2279
2280
2281void FullCodeGenerator::CallIC(Handle<Code> code,
2282 TypeFeedbackId ast_id) {
2283 ic_total_count_++;
2284 // All calls must have a predictable size in full-codegen code to ensure that
2285 // the debugger can patch them correctly.
2286 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2287}
2288
2289
2290// Code common for calls using the IC.
2291void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2292 ASM_LOCATION("FullCodeGenerator::EmitCallWithLoadIC");
2293 Expression* callee = expr->expression();
2294
2295 // Get the target function.
2296 ConvertReceiverMode convert_mode;
2297 if (callee->IsVariableProxy()) {
2298 { StackValueContext context(this);
2299 EmitVariableLoad(callee->AsVariableProxy());
2300 PrepareForBailout(callee, NO_REGISTERS);
2301 }
2302 // Push undefined as receiver. This is patched in the method prologue if it
2303 // is a sloppy mode method.
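// For a plain call like f() the receiver slot therefore holds undefined;
// sloppy-mode callees later substitute the global proxy in their prologue.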
2304 {
2305 UseScratchRegisterScope temps(masm_);
2306 Register temp = temps.AcquireX();
2307 __ LoadRoot(temp, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002308 PushOperand(temp);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002309 }
2310 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2311 } else {
2312 // Load the function from the receiver.
2313 DCHECK(callee->IsProperty());
2314 DCHECK(!callee->AsProperty()->IsSuperAccess());
2315 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2316 EmitNamedPropertyLoad(callee->AsProperty());
2317 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2318 // Push the target function under the receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002319 PopOperand(x10);
2320 PushOperands(x0, x10);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002321 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2322 }
2323
2324 EmitCall(expr, convert_mode);
2325}
2326
2327
2328void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2329 ASM_LOCATION("FullCodeGenerator::EmitSuperCallWithLoadIC");
2330 Expression* callee = expr->expression();
2331 DCHECK(callee->IsProperty());
2332 Property* prop = callee->AsProperty();
2333 DCHECK(prop->IsSuperAccess());
2334 SetExpressionPosition(prop);
2335
2336 Literal* key = prop->key()->AsLiteral();
2337 DCHECK(!key->value()->IsSmi());
2338
2339 // Load the function from the receiver.
2340 const Register scratch = x10;
2341 SuperPropertyReference* super_ref =
2342 callee->AsProperty()->obj()->AsSuperPropertyReference();
2343 VisitForStackValue(super_ref->home_object());
2344 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002345 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002346 __ Peek(scratch, kPointerSize);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002347 PushOperands(x0, scratch);
2348 PushOperand(key->value());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002349
2350 // Stack here:
2351 // - home_object
2352 // - this (receiver)
2353 // - this (receiver) <-- LoadFromSuper will pop here and below.
2354 // - home_object
Ben Murdoch097c5b22016-05-18 11:27:45 +01002355 // - key
2356 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002357
2358 // Replace home_object with target function.
2359 __ Poke(x0, kPointerSize);
2360
2361 // Stack here:
2362 // - target function
2363 // - this (receiver)
2364 EmitCall(expr);
2365}
2366
2367
2368// Code common for calls using the IC.
2369void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2370 Expression* key) {
2371 ASM_LOCATION("FullCodeGenerator::EmitKeyedCallWithLoadIC");
2372 // Load the key.
2373 VisitForAccumulatorValue(key);
2374
2375 Expression* callee = expr->expression();
2376
2377 // Load the function from the receiver.
2378 DCHECK(callee->IsProperty());
2379 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2380 __ Move(LoadDescriptor::NameRegister(), x0);
2381 EmitKeyedPropertyLoad(callee->AsProperty());
2382 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2383
2384 // Push the target function under the receiver.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002385 PopOperand(x10);
2386 PushOperands(x0, x10);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002387
2388 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2389}
2390
2391
2392void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2393 ASM_LOCATION("FullCodeGenerator::EmitKeyedSuperCallWithLoadIC");
2394 Expression* callee = expr->expression();
2395 DCHECK(callee->IsProperty());
2396 Property* prop = callee->AsProperty();
2397 DCHECK(prop->IsSuperAccess());
2398 SetExpressionPosition(prop);
2399
2400 // Load the function from the receiver.
2401 const Register scratch = x10;
2402 SuperPropertyReference* super_ref =
2403 callee->AsProperty()->obj()->AsSuperPropertyReference();
2404 VisitForStackValue(super_ref->home_object());
2405 VisitForAccumulatorValue(super_ref->this_var());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002406 PushOperand(x0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002407 __ Peek(scratch, kPointerSize);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002408 PushOperands(x0, scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002409 VisitForStackValue(prop->key());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002410
2411 // Stack here:
2412 // - home_object
2413 // - this (receiver)
2414 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2415 // - home_object
2416 // - key
Ben Murdoch097c5b22016-05-18 11:27:45 +01002417 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002418
2419 // Replace home_object with target function.
2420 __ Poke(x0, kPointerSize);
2421
2422 // Stack here:
2423 // - target function
2424 // - this (receiver)
2425 EmitCall(expr);
2426}
2427
2428
2429void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2430 ASM_LOCATION("FullCodeGenerator::EmitCall");
2431 // Load the arguments.
2432 ZoneList<Expression*>* args = expr->arguments();
2433 int arg_count = args->length();
2434 for (int i = 0; i < arg_count; i++) {
2435 VisitForStackValue(args->at(i));
2436 }
2437
2438 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
Ben Murdochda12d292016-06-02 14:46:10 +01002439 SetCallPosition(expr, expr->tail_call_mode());
Ben Murdoch097c5b22016-05-18 11:27:45 +01002440 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2441 if (FLAG_trace) {
2442 __ CallRuntime(Runtime::kTraceTailCall);
2443 }
2444 // Update profiling counters before the tail call since we will
2445 // not return to this function.
2446 EmitProfilingCounterHandlingForReturnSequence(true);
2447 }
2448 Handle<Code> ic =
2449 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2450 .code();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002451 __ Mov(x3, SmiFromSlot(expr->CallFeedbackICSlot()));
2452 __ Peek(x1, (arg_count + 1) * kXRegSize);
2453 // Don't assign a type feedback id to the IC, since type feedback is provided
2454 // by the vector above.
2455 CallIC(ic);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002456 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002457
2458 RecordJSReturnSite(expr);
2459 // Restore context register.
2460 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2461 context()->DropAndPlug(1, x0);
2462}
2463
2464
2465void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2466 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
2467 // Prepare to push a copy of the first argument or undefined if it doesn't
2468 // exist.
2469 if (arg_count > 0) {
2470 __ Peek(x9, arg_count * kXRegSize);
2471 } else {
2472 __ LoadRoot(x9, Heap::kUndefinedValueRootIndex);
2473 }
2474
2475 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2476
2477 // Prepare to push the language mode.
2478 __ Mov(x11, Smi::FromInt(language_mode()));
2479 // Prepare to push the start position of the scope the call resides in.
2480 __ Mov(x12, Smi::FromInt(scope()->start_position()));
2481
2482 // Push.
2483 __ Push(x9, x10, x11, x12);
2484
2485 // Do the runtime call.
2486 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2487}
2488
2489
2490// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2491void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2492 VariableProxy* callee = expr->expression()->AsVariableProxy();
2493 if (callee->var()->IsLookupSlot()) {
2494 Label slow, done;
2495 SetExpressionPosition(callee);
2496 // Generate code for loading from variables potentially shadowed
2497 // by eval-introduced variables.
2498 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2499
2500 __ Bind(&slow);
2501 // Call the runtime to find the function to call (returned in x0)
2502 // and the object holding it (returned in x1).
Ben Murdoch097c5b22016-05-18 11:27:45 +01002503 __ Push(callee->name());
2504 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2505 PushOperands(x0, x1); // Receiver, function.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002506 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2507
2508 // If fast case code has been generated, emit code to push the
2509 // function and receiver and have the slow path jump around this
2510 // code.
2511 if (done.is_linked()) {
2512 Label call;
2513 __ B(&call);
2514 __ Bind(&done);
2515 // Push function.
2516 // The receiver is implicitly the global receiver. Indicate this
2517 // by passing the undefined to the call function stub.
2518 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2519 __ Push(x0, x1);
2520 __ Bind(&call);
2521 }
2522 } else {
2523 VisitForStackValue(callee);
2524 // refEnv.WithBaseObject()
2525 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002526 PushOperand(x10); // Reserved receiver slot.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002527 }
2528}
2529
2530
2531void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2532 ASM_LOCATION("FullCodeGenerator::EmitPossiblyEvalCall");
2533 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
2534 // to resolve the function we need to call. Then we call the resolved
2535 // function using the given arguments.
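// Roughly: if the callee is still the builtin eval, the resolver returns a
// function compiled from the eval source for the current scope; otherwise it
// returns the original callee, which is then called like any other function.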
2536 ZoneList<Expression*>* args = expr->arguments();
2537 int arg_count = args->length();
2538
2539 PushCalleeAndWithBaseObject(expr);
2540
2541 // Push the arguments.
2542 for (int i = 0; i < arg_count; i++) {
2543 VisitForStackValue(args->at(i));
2544 }
2545
2546 // Push a copy of the function (found below the arguments) and
2547 // resolve eval.
2548 __ Peek(x10, (arg_count + 1) * kPointerSize);
2549 __ Push(x10);
2550 EmitResolvePossiblyDirectEval(arg_count);
2551
2552 // Touch up the stack with the resolved function.
2553 __ Poke(x0, (arg_count + 1) * kPointerSize);
2554
2555 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
2556
2557 // Record source position for debugger.
2558 SetCallPosition(expr);
2559
2560 // Call the evaluated function.
2561 __ Peek(x1, (arg_count + 1) * kXRegSize);
2562 __ Mov(x0, arg_count);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002563 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2564 expr->tail_call_mode()),
2565 RelocInfo::CODE_TARGET);
2566 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002567 RecordJSReturnSite(expr);
2568 // Restore context register.
2569 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2570 context()->DropAndPlug(1, x0);
2571}
2572
2573
2574void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2575 Comment cmnt(masm_, "[ CallNew");
2576 // According to ECMA-262, section 11.2.2, page 44, the function
2577 // expression in new calls must be evaluated before the
2578 // arguments.
2579
2580 // Push constructor on the stack. If it's not a function it's used as
2581 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2582 // ignored.
2583 DCHECK(!expr->expression()->IsSuperPropertyReference());
2584 VisitForStackValue(expr->expression());
2585
2586 // Push the arguments ("left-to-right") on the stack.
2587 ZoneList<Expression*>* args = expr->arguments();
2588 int arg_count = args->length();
2589 for (int i = 0; i < arg_count; i++) {
2590 VisitForStackValue(args->at(i));
2591 }
2592
2593 // Call the construct call builtin that handles allocation and
2594 // constructor invocation.
2595 SetConstructCallPosition(expr);
2596
2597 // Load function and argument count into x1 and x0.
2598 __ Mov(x0, arg_count);
2599 __ Peek(x1, arg_count * kXRegSize);
2600
2601 // Record call targets in unoptimized code.
2602 __ EmitLoadTypeFeedbackVector(x2);
2603 __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));
2604
2605 CallConstructStub stub(isolate());
2606 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002607 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002608 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2609 // Restore context register.
2610 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2611 context()->Plug(x0);
2612}
2613
2614
2615void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2616 ASM_LOCATION("FullCodeGenerator::EmitSuperConstructorCall");
2617 SuperCallReference* super_call_ref =
2618 expr->expression()->AsSuperCallReference();
2619 DCHECK_NOT_NULL(super_call_ref);
2620
2621 // Push the super constructor target on the stack (may be null,
2622 // but the Construct builtin can deal with that properly).
2623 VisitForAccumulatorValue(super_call_ref->this_function_var());
2624 __ AssertFunction(result_register());
2625 __ Ldr(result_register(),
2626 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2627 __ Ldr(result_register(),
2628 FieldMemOperand(result_register(), Map::kPrototypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002629 PushOperand(result_register());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002630
2631 // Push the arguments ("left-to-right") on the stack.
2632 ZoneList<Expression*>* args = expr->arguments();
2633 int arg_count = args->length();
2634 for (int i = 0; i < arg_count; i++) {
2635 VisitForStackValue(args->at(i));
2636 }
2637
2638 // Call the construct call builtin that handles allocation and
2639 // constructor invocation.
2640 SetConstructCallPosition(expr);
2641
2642 // Load new target into x3.
2643 VisitForAccumulatorValue(super_call_ref->new_target_var());
2644 __ Mov(x3, result_register());
2645
2646 // Load function and argument count into x1 and x0.
2647 __ Mov(x0, arg_count);
2648 __ Peek(x1, arg_count * kXRegSize);
2649
2650 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002651 OperandStackDepthDecrement(arg_count + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002652
2653 RecordJSReturnSite(expr);
2654
2655 // Restore context register.
2656 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2657 context()->Plug(x0);
2658}
2659
2660
2661void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2662 ZoneList<Expression*>* args = expr->arguments();
2663 DCHECK(args->length() == 1);
2664
2665 VisitForAccumulatorValue(args->at(0));
2666
2667 Label materialize_true, materialize_false;
2668 Label* if_true = NULL;
2669 Label* if_false = NULL;
2670 Label* fall_through = NULL;
2671 context()->PrepareTest(&materialize_true, &materialize_false,
2672 &if_true, &if_false, &fall_through);
2673
2674 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2675 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
2676
2677 context()->Plug(if_true, if_false);
2678}
2679
2680
2681void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2682 ZoneList<Expression*>* args = expr->arguments();
2683 DCHECK(args->length() == 1);
2684
2685 VisitForAccumulatorValue(args->at(0));
2686
2687 Label materialize_true, materialize_false;
2688 Label* if_true = NULL;
2689 Label* if_false = NULL;
2690 Label* fall_through = NULL;
2691 context()->PrepareTest(&materialize_true, &materialize_false,
2692 &if_true, &if_false, &fall_through);
2693
2694 __ JumpIfSmi(x0, if_false);
2695 __ CompareObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE);
2696 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2697 Split(ge, if_true, if_false, fall_through);
2698
2699 context()->Plug(if_true, if_false);
2700}
2701
2702
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002703void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2704 ZoneList<Expression*>* args = expr->arguments();
2705 DCHECK(args->length() == 1);
2706
2707 VisitForAccumulatorValue(args->at(0));
2708
2709 Label materialize_true, materialize_false;
2710 Label* if_true = NULL;
2711 Label* if_false = NULL;
2712 Label* fall_through = NULL;
2713 context()->PrepareTest(&materialize_true, &materialize_false,
2714 &if_true, &if_false, &fall_through);
2715
2716 __ JumpIfSmi(x0, if_false);
2717 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
2718 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2719 Split(eq, if_true, if_false, fall_through);
2720
2721 context()->Plug(if_true, if_false);
2722}
2723
2724
2725void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2726 ZoneList<Expression*>* args = expr->arguments();
2727 DCHECK(args->length() == 1);
2728
2729 VisitForAccumulatorValue(args->at(0));
2730
2731 Label materialize_true, materialize_false;
2732 Label* if_true = NULL;
2733 Label* if_false = NULL;
2734 Label* fall_through = NULL;
2735 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2736 &if_false, &fall_through);
2737
2738 __ JumpIfSmi(x0, if_false);
2739 __ CompareObjectType(x0, x10, x11, JS_TYPED_ARRAY_TYPE);
2740 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2741 Split(eq, if_true, if_false, fall_through);
2742
2743 context()->Plug(if_true, if_false);
2744}
2745
2746
2747void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2748 ZoneList<Expression*>* args = expr->arguments();
2749 DCHECK(args->length() == 1);
2750
2751 VisitForAccumulatorValue(args->at(0));
2752
2753 Label materialize_true, materialize_false;
2754 Label* if_true = NULL;
2755 Label* if_false = NULL;
2756 Label* fall_through = NULL;
2757 context()->PrepareTest(&materialize_true, &materialize_false,
2758 &if_true, &if_false, &fall_through);
2759
2760 __ JumpIfSmi(x0, if_false);
2761 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
2762 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2763 Split(eq, if_true, if_false, fall_through);
2764
2765 context()->Plug(if_true, if_false);
2766}
2767
2768
2769void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2770 ZoneList<Expression*>* args = expr->arguments();
2771 DCHECK(args->length() == 1);
2772
2773 VisitForAccumulatorValue(args->at(0));
2774
2775 Label materialize_true, materialize_false;
2776 Label* if_true = NULL;
2777 Label* if_false = NULL;
2778 Label* fall_through = NULL;
2779 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2780 &if_false, &fall_through);
2781
2782 __ JumpIfSmi(x0, if_false);
2783 __ CompareObjectType(x0, x10, x11, JS_PROXY_TYPE);
2784 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2785 Split(eq, if_true, if_false, fall_through);
2786
2787 context()->Plug(if_true, if_false);
2788}
2789
2790
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002791void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2792 ASM_LOCATION("FullCodeGenerator::EmitClassOf");
2793 ZoneList<Expression*>* args = expr->arguments();
2794 DCHECK(args->length() == 1);
2795 Label done, null, function, non_function_constructor;
2796
2797 VisitForAccumulatorValue(args->at(0));
2798
2799 // If the object is not a JSReceiver, we return null.
2800 __ JumpIfSmi(x0, &null);
2801 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2802 __ CompareObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE);
2803 // x10: object's map.
2804 // x11: object's type.
2805 __ B(lt, &null);
2806
2807 // Return 'Function' for JSFunction objects.
Ben Murdochda12d292016-06-02 14:46:10 +01002808 __ Cmp(x11, FIRST_FUNCTION_TYPE);
2809 STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2810 __ B(hs, &function);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002811
2812 // Check if the constructor in the map is a JS function.
2813 Register instance_type = x14;
2814 __ GetMapConstructor(x12, x10, x13, instance_type);
2815 __ Cmp(instance_type, JS_FUNCTION_TYPE);
2816 __ B(ne, &non_function_constructor);
2817
2818 // x12 now contains the constructor function. Grab the
2819 // instance class name from there.
2820 __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
2821 __ Ldr(x0,
2822 FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
2823 __ B(&done);
2824
2825 // Functions have class 'Function'.
2826 __ Bind(&function);
2827 __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
2828 __ B(&done);
2829
2830 // Objects with a non-function constructor have class 'Object'.
2831 __ Bind(&non_function_constructor);
2832 __ LoadRoot(x0, Heap::kObject_stringRootIndex);
2833 __ B(&done);
2834
2835 // Non-JS objects have class null.
2836 __ Bind(&null);
2837 __ LoadRoot(x0, Heap::kNullValueRootIndex);
2838
2839 // All done.
2840 __ Bind(&done);
2841
2842 context()->Plug(x0);
2843}
2844
2845
2846void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
2847 ASM_LOCATION("FullCodeGenerator::EmitValueOf");
2848 ZoneList<Expression*>* args = expr->arguments();
2849 DCHECK(args->length() == 1);
2850 VisitForAccumulatorValue(args->at(0)); // Load the object.
2851
2852 Label done;
2853 // If the object is a smi return the object.
2854 __ JumpIfSmi(x0, &done);
2855 // If the object is not a value type, return the object.
2856 __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
2857 __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));
2858
2859 __ Bind(&done);
2860 context()->Plug(x0);
2861}
2862
2863
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002864void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
2865 ZoneList<Expression*>* args = expr->arguments();
2866 DCHECK_EQ(3, args->length());
2867
2868 Register string = x0;
2869 Register index = x1;
2870 Register value = x2;
2871 Register scratch = x10;
2872
2873 VisitForStackValue(args->at(0)); // index
2874 VisitForStackValue(args->at(1)); // value
2875 VisitForAccumulatorValue(args->at(2)); // string
Ben Murdoch097c5b22016-05-18 11:27:45 +01002876 PopOperands(value, index);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002877
2878 if (FLAG_debug_code) {
2879 __ AssertSmi(value, kNonSmiValue);
2880 __ AssertSmi(index, kNonSmiIndex);
2881 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
2882 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
2883 one_byte_seq_type);
2884 }
2885
2886 __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
2887 __ SmiUntag(value);
2888 __ SmiUntag(index);
2889 __ Strb(value, MemOperand(scratch, index));
2890 context()->Plug(string);
2891}
2892
2893
2894void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
2895 ZoneList<Expression*>* args = expr->arguments();
2896 DCHECK_EQ(3, args->length());
2897
2898 Register string = x0;
2899 Register index = x1;
2900 Register value = x2;
2901 Register scratch = x10;
2902
2903 VisitForStackValue(args->at(0)); // index
2904 VisitForStackValue(args->at(1)); // value
2905 VisitForAccumulatorValue(args->at(2)); // string
Ben Murdoch097c5b22016-05-18 11:27:45 +01002906 PopOperands(value, index);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002907
2908 if (FLAG_debug_code) {
2909 __ AssertSmi(value, kNonSmiValue);
2910 __ AssertSmi(index, kNonSmiIndex);
2911 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
2912 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
2913 two_byte_seq_type);
2914 }
2915
2916 __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
2917 __ SmiUntag(value);
2918 __ SmiUntag(index);
2919 __ Strh(value, MemOperand(scratch, index, LSL, 1));
2920 context()->Plug(string);
2921}
2922
2923
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002924void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
2925 ZoneList<Expression*>* args = expr->arguments();
2926 DCHECK(args->length() == 1);
2927
2928 VisitForAccumulatorValue(args->at(0));
2929
2930 Label done;
2931 Register code = x0;
2932 Register result = x1;
2933
2934 StringCharFromCodeGenerator generator(code, result);
2935 generator.GenerateFast(masm_);
2936 __ B(&done);
2937
2938 NopRuntimeCallHelper call_helper;
2939 generator.GenerateSlow(masm_, call_helper);
2940
2941 __ Bind(&done);
2942 context()->Plug(result);
2943}
2944
2945
2946void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
2947 ZoneList<Expression*>* args = expr->arguments();
2948 DCHECK(args->length() == 2);
2949
2950 VisitForStackValue(args->at(0));
2951 VisitForAccumulatorValue(args->at(1));
2952
2953 Register object = x1;
2954 Register index = x0;
2955 Register result = x3;
2956
Ben Murdoch097c5b22016-05-18 11:27:45 +01002957 PopOperand(object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002958
2959 Label need_conversion;
2960 Label index_out_of_range;
2961 Label done;
2962 StringCharCodeAtGenerator generator(object,
2963 index,
2964 result,
2965 &need_conversion,
2966 &need_conversion,
2967 &index_out_of_range,
2968 STRING_INDEX_IS_NUMBER);
2969 generator.GenerateFast(masm_);
2970 __ B(&done);
2971
2972 __ Bind(&index_out_of_range);
2973 // When the index is out of range, the spec requires us to return NaN.
2974 __ LoadRoot(result, Heap::kNanValueRootIndex);
2975 __ B(&done);
2976
2977 __ Bind(&need_conversion);
2978 // Load the undefined value into the result register, which will
2979 // trigger conversion.
2980 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2981 __ B(&done);
2982
2983 NopRuntimeCallHelper call_helper;
2984 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
2985
2986 __ Bind(&done);
2987 context()->Plug(result);
2988}
2989
2990
2991void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
2992 ZoneList<Expression*>* args = expr->arguments();
2993 DCHECK(args->length() == 2);
2994
2995 VisitForStackValue(args->at(0));
2996 VisitForAccumulatorValue(args->at(1));
2997
2998 Register object = x1;
2999 Register index = x0;
3000 Register result = x0;
3001
Ben Murdoch097c5b22016-05-18 11:27:45 +01003002 PopOperand(object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003003
3004 Label need_conversion;
3005 Label index_out_of_range;
3006 Label done;
3007 StringCharAtGenerator generator(object,
3008 index,
3009 x3,
3010 result,
3011 &need_conversion,
3012 &need_conversion,
3013 &index_out_of_range,
3014 STRING_INDEX_IS_NUMBER);
3015 generator.GenerateFast(masm_);
3016 __ B(&done);
3017
3018 __ Bind(&index_out_of_range);
3019 // When the index is out of range, the spec requires us to return
3020 // the empty string.
3021 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3022 __ B(&done);
3023
3024 __ Bind(&need_conversion);
3025 // Move smi zero into the result register, which will trigger conversion.
3026 __ Mov(result, Smi::FromInt(0));
3027 __ B(&done);
3028
3029 NopRuntimeCallHelper call_helper;
3030 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3031
3032 __ Bind(&done);
3033 context()->Plug(result);
3034}
3035
3036
3037void FullCodeGenerator::EmitCall(CallRuntime* expr) {
3038 ASM_LOCATION("FullCodeGenerator::EmitCall");
3039 ZoneList<Expression*>* args = expr->arguments();
3040 DCHECK_LE(2, args->length());
3041 // Push target, receiver and arguments onto the stack.
3042 for (Expression* const arg : *args) {
3043 VisitForStackValue(arg);
3044 }
3045 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3046 // Move target to x1.
3047 int const argc = args->length() - 2;
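// The stack holds the target, the receiver and argc arguments (pushed in
// that order), so the target sits (argc + 1) slots below the stack top.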
3048 __ Peek(x1, (argc + 1) * kXRegSize);
3049 // Call the target.
3050 __ Mov(x0, argc);
3051 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
3052 OperandStackDepthDecrement(argc + 1);
3053 // Restore context register.
3054 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3055 // Discard the function left on TOS.
3056 context()->DropAndPlug(1, x0);
3057}
3058
3059
3060void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3061 ZoneList<Expression*>* args = expr->arguments();
3062 VisitForAccumulatorValue(args->at(0));
3063
3064 Label materialize_true, materialize_false;
3065 Label* if_true = NULL;
3066 Label* if_false = NULL;
3067 Label* fall_through = NULL;
3068 context()->PrepareTest(&materialize_true, &materialize_false,
3069 &if_true, &if_false, &fall_through);
3070
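// A string caches an array index in its hash field; the cached index is
// present exactly when the bits in kContainsCachedArrayIndexMask are all
// clear, which is what the Tst/Split pair below tests.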
3071 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3072 __ Tst(x10, String::kContainsCachedArrayIndexMask);
3073 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3074 Split(eq, if_true, if_false, fall_through);
3075
3076 context()->Plug(if_true, if_false);
3077}
3078
3079
3080void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3081 ZoneList<Expression*>* args = expr->arguments();
3082 DCHECK(args->length() == 1);
3083 VisitForAccumulatorValue(args->at(0));
3084
3085 __ AssertString(x0);
3086
3087 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3088 __ IndexFromHash(x10, x0);
3089
3090 context()->Plug(x0);
3091}
3092
3093
3094void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
3095 ZoneList<Expression*>* args = expr->arguments();
3096 DCHECK_EQ(1, args->length());
3097 VisitForAccumulatorValue(args->at(0));
3098 __ AssertFunction(x0);
3099 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
3100 __ Ldr(x0, FieldMemOperand(x0, Map::kPrototypeOffset));
3101 context()->Plug(x0);
3102}
3103
3104void FullCodeGenerator::EmitGetOrdinaryHasInstance(CallRuntime* expr) {
3105 DCHECK_EQ(0, expr->arguments()->length());
3106 __ LoadNativeContextSlot(Context::ORDINARY_HAS_INSTANCE_INDEX, x0);
3107 context()->Plug(x0);
3108}
3109
3110void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3111 DCHECK(expr->arguments()->length() == 0);
3112 ExternalReference debug_is_active =
3113 ExternalReference::debug_is_active_address(isolate());
3114 __ Mov(x10, debug_is_active);
3115 __ Ldrb(x0, MemOperand(x10));
3116 __ SmiTag(x0);
3117 context()->Plug(x0);
3118}
3119
3120
3121void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
3122 ZoneList<Expression*>* args = expr->arguments();
3123 DCHECK_EQ(2, args->length());
3124 VisitForStackValue(args->at(0));
3125 VisitForStackValue(args->at(1));
3126
3127 Label runtime, done;
3128
3129 Register result = x0;
3130 __ Allocate(JSIteratorResult::kSize, result, x10, x11, &runtime, TAG_OBJECT);
3131 Register map_reg = x1;
3132 Register result_value = x2;
3133 Register boolean_done = x3;
3134 Register empty_fixed_array = x4;
3135 Register untagged_result = x5;
3136 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, map_reg);
3137 __ Pop(boolean_done);
3138 __ Pop(result_value);
3139 __ LoadRoot(empty_fixed_array, Heap::kEmptyFixedArrayRootIndex);
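// The Stp stores below write adjacent field pairs (properties/elements and
// value/done); the STATIC_ASSERTs check that those offsets really are
// contiguous.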
3140 STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
3141 JSObject::kElementsOffset);
3142 STATIC_ASSERT(JSIteratorResult::kValueOffset + kPointerSize ==
3143 JSIteratorResult::kDoneOffset);
3144 __ ObjectUntag(untagged_result, result);
3145 __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
3146 __ Stp(empty_fixed_array, empty_fixed_array,
3147 MemOperand(untagged_result, JSObject::kPropertiesOffset));
3148 __ Stp(result_value, boolean_done,
3149 MemOperand(untagged_result, JSIteratorResult::kValueOffset));
3150 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
3151 __ B(&done);
3152
3153 __ Bind(&runtime);
3154 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
3155
3156 __ Bind(&done);
3157 context()->Plug(x0);
3158}
3159
3160
3161void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
3162 // Push function.
3163 __ LoadNativeContextSlot(expr->context_index(), x0);
3164 PushOperand(x0);
3165
3166 // Push undefined as the receiver.
3167 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3168 PushOperand(x0);
3169}
3170
3171
3172void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
3173 ZoneList<Expression*>* args = expr->arguments();
3174 int arg_count = args->length();
3175
3176 SetCallPosition(expr);
3177 __ Peek(x1, (arg_count + 1) * kPointerSize);
3178 __ Mov(x0, arg_count);
3179 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
3180 RelocInfo::CODE_TARGET);
3181 OperandStackDepthDecrement(arg_count + 1);
3182
3183 // Restore context register.
3184 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3185}
3186
3187
3188void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3189 switch (expr->op()) {
3190 case Token::DELETE: {
3191 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3192 Property* property = expr->expression()->AsProperty();
3193 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3194
3195 if (property != NULL) {
3196 VisitForStackValue(property->obj());
3197 VisitForStackValue(property->key());
3198 CallRuntimeWithOperands(is_strict(language_mode())
3199 ? Runtime::kDeleteProperty_Strict
3200 : Runtime::kDeleteProperty_Sloppy);
3201 context()->Plug(x0);
3202 } else if (proxy != NULL) {
3203 Variable* var = proxy->var();
3204 // Delete of an unqualified identifier is disallowed in strict mode but
3205 // "delete this" is allowed.
3206 bool is_this = var->HasThisName(isolate());
3207 DCHECK(is_sloppy(language_mode()) || is_this);
3208 if (var->IsUnallocatedOrGlobalSlot()) {
3209 __ LoadGlobalObject(x12);
3210 __ Mov(x11, Operand(var->name()));
3211 __ Push(x12, x11);
3212 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
3213 context()->Plug(x0);
3214 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3215 // Result of deleting non-global, non-dynamic variables is false.
3216 // The subexpression does not have side effects.
3217 context()->Plug(is_this);
3218 } else {
3219 // Non-global variable. Call the runtime to try to delete from the
3220 // context where the variable was introduced.
3221 __ Push(var->name());
3222 __ CallRuntime(Runtime::kDeleteLookupSlot);
3223 context()->Plug(x0);
3224 }
3225 } else {
3226 // Result of deleting non-property, non-variable reference is true.
3227 // The subexpression may have side effects.
3228 VisitForEffect(expr->expression());
3229 context()->Plug(true);
3230 }
3231 break;
3233 }
3234 case Token::VOID: {
3235 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3236 VisitForEffect(expr->expression());
3237 context()->Plug(Heap::kUndefinedValueRootIndex);
3238 break;
3239 }
3240 case Token::NOT: {
3241 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3242 if (context()->IsEffect()) {
3243 // Unary NOT has no side effects so it's only necessary to visit the
3244 // subexpression. Match the optimizing compiler by not branching.
3245 VisitForEffect(expr->expression());
3246 } else if (context()->IsTest()) {
3247 const TestContext* test = TestContext::cast(context());
3248 // The labels are swapped for the recursive call.
3249 VisitForControl(expr->expression(),
3250 test->false_label(),
3251 test->true_label(),
3252 test->fall_through());
3253 context()->Plug(test->true_label(), test->false_label());
3254 } else {
3255 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3256 // TODO(jbramley): This could be much more efficient using (for
3257 // example) the CSEL instruction.
3258 Label materialize_true, materialize_false, done;
3259 VisitForControl(expr->expression(),
3260 &materialize_false,
3261 &materialize_true,
3262 &materialize_true);
3263 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
3264
3265 __ Bind(&materialize_true);
3266 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3267 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
3268 __ B(&done);
3269
3270 __ Bind(&materialize_false);
3271 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3272 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
3273 __ B(&done);
3274
3275 __ Bind(&done);
3276 if (context()->IsStackValue()) {
3277 __ Push(result_register());
3278 }
3279 }
3280 break;
3281 }
3282 case Token::TYPEOF: {
3283 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3284 {
3285 AccumulatorValueContext context(this);
3286 VisitForTypeofValue(expr->expression());
3287 }
3288 __ Mov(x3, x0);
3289 TypeofStub typeof_stub(isolate());
3290 __ CallStub(&typeof_stub);
3291 context()->Plug(x0);
3292 break;
3293 }
3294 default:
3295 UNREACHABLE();
3296 }
3297}
3298
3299
3300void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3301 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3302
3303 Comment cmnt(masm_, "[ CountOperation");
3304
3305 Property* prop = expr->expression()->AsProperty();
3306 LhsKind assign_type = Property::GetAssignType(prop);
3307
3308 // Evaluate expression and get value.
3309 if (assign_type == VARIABLE) {
3310 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3311 AccumulatorValueContext context(this);
3312 EmitVariableLoad(expr->expression()->AsVariableProxy());
3313 } else {
3314 // Reserve space for result of postfix operation.
3315 if (expr->is_postfix() && !context()->IsEffect()) {
3316 PushOperand(xzr);
3317 }
3318 switch (assign_type) {
3319 case NAMED_PROPERTY: {
3320 // Put the object both on the stack and in the register.
3321 VisitForStackValue(prop->obj());
3322 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
3323 EmitNamedPropertyLoad(prop);
3324 break;
3325 }
3326
3327 case NAMED_SUPER_PROPERTY: {
3328 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3329 VisitForAccumulatorValue(
3330 prop->obj()->AsSuperPropertyReference()->home_object());
3331 PushOperand(result_register());
3332 const Register scratch = x10;
3333 __ Peek(scratch, kPointerSize);
3334 PushOperands(scratch, result_register());
3335 EmitNamedSuperPropertyLoad(prop);
3336 break;
3337 }
3338
3339 case KEYED_SUPER_PROPERTY: {
3340 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3341 VisitForStackValue(
3342 prop->obj()->AsSuperPropertyReference()->home_object());
3343 VisitForAccumulatorValue(prop->key());
3344 PushOperand(result_register());
3345 const Register scratch1 = x10;
3346 const Register scratch2 = x11;
3347 __ Peek(scratch1, 2 * kPointerSize);
3348 __ Peek(scratch2, kPointerSize);
3349 PushOperands(scratch1, scratch2, result_register());
3350 EmitKeyedSuperPropertyLoad(prop);
3351 break;
3352 }
3353
3354 case KEYED_PROPERTY: {
3355 VisitForStackValue(prop->obj());
3356 VisitForStackValue(prop->key());
3357 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
3358 __ Peek(LoadDescriptor::NameRegister(), 0);
3359 EmitKeyedPropertyLoad(prop);
3360 break;
3361 }
3362
3363 case VARIABLE:
3364 UNREACHABLE();
3365 }
3366 }
3367
3368 // We need a second deoptimization point after loading the value
3369 // in case evaluating the property load may have a side effect.
3370 if (assign_type == VARIABLE) {
3371 PrepareForBailout(expr->expression(), TOS_REG);
3372 } else {
3373 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
3374 }
3375
3376 // Inline smi case if we are in a loop.
3377 Label stub_call, done;
3378 JumpPatchSite patch_site(masm_);
3379
3380 int count_value = expr->op() == Token::INC ? 1 : -1;
3381 if (ShouldInlineSmiCase(expr->op())) {
3382 Label slow;
3383 patch_site.EmitJumpIfNotSmi(x0, &slow);
3384
3385 // Save result for postfix expressions.
3386 if (expr->is_postfix()) {
3387 if (!context()->IsEffect()) {
3388 // Save the result on the stack. If we have a named or keyed property we
3389 // store the result under the receiver that is currently on top of the
3390 // stack.
3391 switch (assign_type) {
3392 case VARIABLE:
3393 __ Push(x0);
3394 break;
3395 case NAMED_PROPERTY:
3396 __ Poke(x0, kPointerSize);
3397 break;
3398 case NAMED_SUPER_PROPERTY:
3399 __ Poke(x0, kPointerSize * 2);
3400 break;
3401 case KEYED_PROPERTY:
3402 __ Poke(x0, kPointerSize * 2);
3403 break;
3404 case KEYED_SUPER_PROPERTY:
3405 __ Poke(x0, kPointerSize * 3);
3406 break;
3407 }
3408 }
3409 }
3410
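// Smis are tagged integers, so adding the tagged constant
// Smi::FromInt(count_value) performs the +/-1 directly on the tagged value.
// A signed overflow (V flag set) means the result is no longer a valid smi,
// in which case we undo the addition and go through the stub instead.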
3411 __ Adds(x0, x0, Smi::FromInt(count_value));
3412 __ B(vc, &done);
3413 // Call stub. Undo operation first.
3414 __ Sub(x0, x0, Smi::FromInt(count_value));
3415 __ B(&stub_call);
3416 __ Bind(&slow);
3417 }
3418
3419 // Convert old value into a number.
3420 ToNumberStub convert_stub(isolate());
3421 __ CallStub(&convert_stub);
3422 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
3423
3424 // Save result for postfix expressions.
3425 if (expr->is_postfix()) {
3426 if (!context()->IsEffect()) {
3427 // Save the result on the stack. If we have a named or keyed property
3428 // we store the result under the receiver that is currently on top
3429 // of the stack.
3430 switch (assign_type) {
3431 case VARIABLE:
3432 PushOperand(x0);
3433 break;
3434 case NAMED_PROPERTY:
3435 __ Poke(x0, kXRegSize);
3436 break;
3437 case NAMED_SUPER_PROPERTY:
3438 __ Poke(x0, 2 * kXRegSize);
3439 break;
3440 case KEYED_PROPERTY:
3441 __ Poke(x0, 2 * kXRegSize);
3442 break;
3443 case KEYED_SUPER_PROPERTY:
3444 __ Poke(x0, 3 * kXRegSize);
3445 break;
3446 }
3447 }
3448 }
3449
3450 __ Bind(&stub_call);
3451 __ Mov(x1, x0);
3452 __ Mov(x0, Smi::FromInt(count_value));
3453
3454 SetExpressionPosition(expr);
3455
3456 {
3457 Assembler::BlockPoolsScope scope(masm_);
3458 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
3459 CallIC(code, expr->CountBinOpFeedbackId());
3460 patch_site.EmitPatchInfo();
3461 }
3462 __ Bind(&done);
3463
3464 // Store the value returned in x0.
3465 switch (assign_type) {
3466 case VARIABLE:
3467 if (expr->is_postfix()) {
3468 { EffectContext context(this);
3469 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3470 Token::ASSIGN, expr->CountSlot());
3471 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3472 context.Plug(x0);
3473 }
3474 // For all contexts except EffectContext we have the result on
3475 // top of the stack.
3476 if (!context()->IsEffect()) {
3477 context()->PlugTOS();
3478 }
3479 } else {
3480 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3481 Token::ASSIGN, expr->CountSlot());
3482 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3483 context()->Plug(x0);
3484 }
3485 break;
3486 case NAMED_PROPERTY: {
3487 __ Mov(StoreDescriptor::NameRegister(),
3488 Operand(prop->key()->AsLiteral()->value()));
3489 PopOperand(StoreDescriptor::ReceiverRegister());
3490 EmitLoadStoreICSlot(expr->CountSlot());
3491 CallStoreIC();
3492 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3493 if (expr->is_postfix()) {
3494 if (!context()->IsEffect()) {
3495 context()->PlugTOS();
3496 }
3497 } else {
3498 context()->Plug(x0);
3499 }
3500 break;
3501 }
3502 case NAMED_SUPER_PROPERTY: {
3503 EmitNamedSuperPropertyStore(prop);
3504 if (expr->is_postfix()) {
3505 if (!context()->IsEffect()) {
3506 context()->PlugTOS();
3507 }
3508 } else {
3509 context()->Plug(x0);
3510 }
3511 break;
3512 }
3513 case KEYED_SUPER_PROPERTY: {
3514 EmitKeyedSuperPropertyStore(prop);
3515 if (expr->is_postfix()) {
3516 if (!context()->IsEffect()) {
3517 context()->PlugTOS();
3518 }
3519 } else {
3520 context()->Plug(x0);
3521 }
3522 break;
3523 }
3524 case KEYED_PROPERTY: {
3525 PopOperand(StoreDescriptor::NameRegister());
3526 PopOperand(StoreDescriptor::ReceiverRegister());
3527 Handle<Code> ic =
3528 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3529 EmitLoadStoreICSlot(expr->CountSlot());
3530 CallIC(ic);
3531 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3532 if (expr->is_postfix()) {
3533 if (!context()->IsEffect()) {
3534 context()->PlugTOS();
3535 }
3536 } else {
3537 context()->Plug(x0);
3538 }
3539 break;
3540 }
3541 }
3542}
3543
3544
3545void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3546 Expression* sub_expr,
3547 Handle<String> check) {
3548 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
3549 Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
3550 Label materialize_true, materialize_false;
3551 Label* if_true = NULL;
3552 Label* if_false = NULL;
3553 Label* fall_through = NULL;
3554 context()->PrepareTest(&materialize_true, &materialize_false,
3555 &if_true, &if_false, &fall_through);
3556
3557 { AccumulatorValueContext context(this);
3558 VisitForTypeofValue(sub_expr);
3559 }
3560 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3561
3562 Factory* factory = isolate()->factory();
3563 if (String::Equals(check, factory->number_string())) {
3564 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
3565 __ JumpIfSmi(x0, if_true);
3566 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
3567 __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
3568 Split(eq, if_true, if_false, fall_through);
3569 } else if (String::Equals(check, factory->string_string())) {
3570 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
3571 __ JumpIfSmi(x0, if_false);
3572 __ CompareObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE);
3573 Split(lt, if_true, if_false, fall_through);
3574 } else if (String::Equals(check, factory->symbol_string())) {
3575 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
3576 __ JumpIfSmi(x0, if_false);
3577 __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
3578 Split(eq, if_true, if_false, fall_through);
3579 } else if (String::Equals(check, factory->boolean_string())) {
3580 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
3581 __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
3582 __ CompareRoot(x0, Heap::kFalseValueRootIndex);
3583 Split(eq, if_true, if_false, fall_through);
3584 } else if (String::Equals(check, factory->undefined_string())) {
3585 ASM_LOCATION(
3586 "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
3587 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_false);
3588 __ JumpIfSmi(x0, if_false);
3589 // Check for undetectable objects => true.
3590 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
3591 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
3592 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
3593 fall_through);
3594 } else if (String::Equals(check, factory->function_string())) {
3595 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
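// typeof yields "function" only for maps that are callable and not
// undetectable, hence the mask of both bits and the comparison against
// just 1 << Map::kIsCallable below.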
3596 __ JumpIfSmi(x0, if_false);
3597 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
3598 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
3599 __ And(x1, x1, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
3600 __ CompareAndSplit(x1, Operand(1 << Map::kIsCallable), eq, if_true,
3601 if_false, fall_through);
3602 } else if (String::Equals(check, factory->object_string())) {
3603 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
3604 __ JumpIfSmi(x0, if_false);
3605 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
3606 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3607 __ JumpIfObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE, if_false, lt);
3608 // Check for callable or undetectable objects => false.
3609 __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
3610 __ TestAndSplit(x10, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable),
3611 if_true, if_false, fall_through);
3612// clang-format off
3613#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3614 } else if (String::Equals(check, factory->type##_string())) { \
3615 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof " \
3616 #type "_string"); \
3617 __ JumpIfSmi(x0, if_true); \
3618 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset)); \
3619 __ CompareRoot(x0, Heap::k##Type##MapRootIndex); \
3620 Split(eq, if_true, if_false, fall_through);
3621 SIMD128_TYPES(SIMD128_TYPE)
3622#undef SIMD128_TYPE
3623 // clang-format on
3624 } else {
3625 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
3626 if (if_false != fall_through) __ B(if_false);
3627 }
3628 context()->Plug(if_true, if_false);
3629}
3630
3631
3632void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3633 Comment cmnt(masm_, "[ CompareOperation");
3634 SetExpressionPosition(expr);
3635
3636 // Try to generate an optimized comparison with a literal value.
3637 // TODO(jbramley): This only checks common values like NaN or undefined.
3638 // Should it also handle ARM64 immediate operands?
3639 if (TryLiteralCompare(expr)) {
3640 return;
3641 }
3642
3643 // Assign labels according to context()->PrepareTest.
3644 Label materialize_true;
3645 Label materialize_false;
3646 Label* if_true = NULL;
3647 Label* if_false = NULL;
3648 Label* fall_through = NULL;
3649 context()->PrepareTest(&materialize_true, &materialize_false,
3650 &if_true, &if_false, &fall_through);
3651
3652 Token::Value op = expr->op();
3653 VisitForStackValue(expr->left());
3654 switch (op) {
3655 case Token::IN:
3656 VisitForStackValue(expr->right());
3657 CallRuntimeWithOperands(Runtime::kHasProperty);
3658 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3659 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
3660 Split(eq, if_true, if_false, fall_through);
3661 break;
3662
3663 case Token::INSTANCEOF: {
3664 VisitForAccumulatorValue(expr->right());
3665 PopOperand(x1);
3666 InstanceOfStub stub(isolate());
3667 __ CallStub(&stub);
3668 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3669 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
3670 Split(eq, if_true, if_false, fall_through);
3671 break;
3672 }
3673
3674 default: {
3675 VisitForAccumulatorValue(expr->right());
3676 Condition cond = CompareIC::ComputeCondition(op);
3677
3678 // Pop the stack value.
3679 PopOperand(x1);
3680
3681 JumpPatchSite patch_site(masm_);
3682 if (ShouldInlineSmiCase(op)) {
3683 Label slow_case;
3684 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
3685 __ Cmp(x1, x0);
3686 Split(cond, if_true, if_false, NULL);
3687 __ Bind(&slow_case);
3688 }
3689
3690 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
3691 CallIC(ic, expr->CompareOperationFeedbackId());
3692 patch_site.EmitPatchInfo();
3693 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3694 __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
3695 }
3696 }
3697
3698 // Convert the result of the comparison into one expected for this
3699 // expression's context.
3700 context()->Plug(if_true, if_false);
3701}
3702
3703
3704void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
3705 Expression* sub_expr,
3706 NilValue nil) {
3707 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
3708 Label materialize_true, materialize_false;
3709 Label* if_true = NULL;
3710 Label* if_false = NULL;
3711 Label* fall_through = NULL;
3712 context()->PrepareTest(&materialize_true, &materialize_false,
3713 &if_true, &if_false, &fall_through);
3714
3715 VisitForAccumulatorValue(sub_expr);
3716 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3717
3718 if (expr->op() == Token::EQ_STRICT) {
3719 Heap::RootListIndex nil_value = nil == kNullValue ?
3720 Heap::kNullValueRootIndex :
3721 Heap::kUndefinedValueRootIndex;
3722 __ CompareRoot(x0, nil_value);
3723 Split(eq, if_true, if_false, fall_through);
3724 } else {
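// Non-strict comparison with null/undefined is also true for undetectable
// objects (e.g. document.all), so test the map's undetectable bit.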
3725 __ JumpIfSmi(x0, if_false);
3726 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
3727 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
3728 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
3729 fall_through);
3730 }
3731
3732 context()->Plug(if_true, if_false);
3733}
3734
3735
3736void FullCodeGenerator::VisitYield(Yield* expr) {
3737 Comment cmnt(masm_, "[ Yield");
3738 SetExpressionPosition(expr);
3739
3740 // Evaluate yielded value first; the initial iterator definition depends on
3741 // this. It stays on the stack while we update the iterator.
3742 VisitForStackValue(expr->expression());
3743
3744 // TODO(jbramley): Tidy this up once the merge is done, using named registers
3745 // and suchlike. The implementation changes a little by bleeding_edge so I
3746 // don't want to spend too much time on it now.
3747
3748 Label suspend, continuation, post_runtime, resume;
3749
3750 __ B(&suspend);
3751 // TODO(jbramley): This label is bound here because the following code
3752 // looks at its pos(). Is it possible to do something more efficient here,
3753 // perhaps using Adr?
3754 __ Bind(&continuation);
3755 // When we arrive here, the stack top is the resume mode and
3756 // result_register() holds the input value (the argument given to the
3757 // respective resume operation).
3758 __ RecordGeneratorContinuation();
3759 __ Pop(x1);
3760 __ Cmp(x1, Smi::FromInt(JSGeneratorObject::RETURN));
3761 __ B(ne, &resume);
3762 __ Push(result_register());
3763 EmitCreateIteratorResult(true);
3764 EmitUnwindAndReturn();
3765
3766 __ Bind(&suspend);
3767 OperandStackDepthIncrement(1); // Not popped on this path.
3768 VisitForAccumulatorValue(expr->generator_object());
3769 DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
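// Record the code offset of the &continuation label (as a smi) in the
// generator object so that EmitGeneratorResume can branch back to it.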
3770 __ Mov(x1, Smi::FromInt(continuation.pos()));
3771 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
3772 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
3773 __ Mov(x1, cp);
3774 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
3775 kLRHasBeenSaved, kDontSaveFPRegs);
3776 __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
3777 __ Cmp(__ StackPointer(), x1);
3778 __ B(eq, &post_runtime);
3779 __ Push(x0); // generator object
3780 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
3781 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3782 __ Bind(&post_runtime);
3783 PopOperand(result_register());
3784 EmitReturnSequence();
3785
3786 __ Bind(&resume);
3787 context()->Plug(result_register());
3788}
3789
3790
3791void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
3792 Expression *value,
3793 JSGeneratorObject::ResumeMode resume_mode) {
3794 ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
3795 Register generator_object = x1;
3796 Register the_hole = x2;
3797 Register operand_stack_size = w3;
3798 Register function = x4;
3799
3800 // The value stays in x0, and is ultimately read by the resumed generator, as
3801 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
3802 // is read to throw the value when the resumed generator is already closed. x1
3803 // will hold the generator object until the activation has been resumed.
3804 VisitForStackValue(generator);
3805 VisitForAccumulatorValue(value);
3806 PopOperand(generator_object);
3807
3808 // Store input value into generator object.
3809 __ Str(result_register(),
3810 FieldMemOperand(x1, JSGeneratorObject::kInputOffset));
3811 __ Mov(x2, result_register());
3812 __ RecordWriteField(x1, JSGeneratorObject::kInputOffset, x2, x3,
3813 kLRHasBeenSaved, kDontSaveFPRegs);
3814
3815 // Load suspended function and context.
3816 __ Ldr(cp, FieldMemOperand(generator_object,
3817 JSGeneratorObject::kContextOffset));
3818 __ Ldr(function, FieldMemOperand(generator_object,
3819 JSGeneratorObject::kFunctionOffset));
3820
3821 // Load receiver and store as the first argument.
3822 __ Ldr(x10, FieldMemOperand(generator_object,
3823 JSGeneratorObject::kReceiverOffset));
3824 __ Push(x10);
3825
3826 // Push holes for arguments to generator function. Since the parser forced
3827 // context allocation for any variables in generators, the actual argument
3828 // values have already been copied into the context and these dummy values
3829 // will never be used.
3830 __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
3831
3832 // The number of arguments is stored as an int32_t, and -1 is a marker
3833 // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign
3834 // extension to correctly handle it. However, in this case, we operate on
3835 // 32-bit W registers, so extension isn't required.
3836 __ Ldr(w10, FieldMemOperand(x10,
3837 SharedFunctionInfo::kFormalParameterCountOffset));
3838 __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
3839 __ PushMultipleTimes(the_hole, w10);
3840
3841 // Enter a new JavaScript frame, and initialize its slots as they were when
3842 // the generator was suspended.
3843 Label resume_frame, done;
3844 __ Bl(&resume_frame);
3845 __ B(&done);
3846
3847 __ Bind(&resume_frame);
3848 __ Push(lr, // Return address.
3849 fp, // Caller's frame pointer.
3850 cp, // Callee's context.
3851 function); // Callee's JS Function.
3852 __ Add(fp, __ StackPointer(), kPointerSize * 2);
3853
3854 // Load and untag the operand stack size.
3855 __ Ldr(x10, FieldMemOperand(generator_object,
3856 JSGeneratorObject::kOperandStackOffset));
3857 __ Ldr(operand_stack_size,
3858 UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
3859
3860 // If we are sending a value and there is no operand stack, we can jump back
3861 // in directly.
3862 if (resume_mode == JSGeneratorObject::NEXT) {
3863 Label slow_resume;
3864 __ Cbnz(operand_stack_size, &slow_resume);
3865 __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
3866 __ Ldrsw(x11,
3867 UntagSmiFieldMemOperand(generator_object,
3868 JSGeneratorObject::kContinuationOffset));
3869 __ Add(x10, x10, x11);
3870 __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
3871 __ Str(x12, FieldMemOperand(generator_object,
3872 JSGeneratorObject::kContinuationOffset));
3873 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
3874 __ Br(x10);
3875
3876 __ Bind(&slow_resume);
3877 }
3878
3879 // Otherwise, we push holes for the operand stack and call the runtime to fix
3880 // up the stack and the handlers.
3881 __ PushMultipleTimes(the_hole, operand_stack_size);
3882
3883 __ Mov(x10, Smi::FromInt(resume_mode));
3884 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation.
3885 __ Push(generator_object, result_register(), x10);
3886 __ CallRuntime(Runtime::kResumeJSGeneratorObject);
3887 // Not reached: the runtime call returns elsewhere.
3888 __ Unreachable();
3889
3890 __ Bind(&done);
3891 context()->Plug(result_register());
3892}
3893
3894void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
3895 OperandStackDepthIncrement(2);
3896 __ Push(reg1, reg2);
3897}
3898
3899void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
3900 Register reg3) {
3901 OperandStackDepthIncrement(3);
3902 __ Push(reg1, reg2, reg3);
3903}
3904
3905void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
3906 OperandStackDepthDecrement(2);
3907 __ Pop(reg1, reg2);
3908}
3909
3910void FullCodeGenerator::EmitOperandStackDepthCheck() {
3911 if (FLAG_debug_code) {
3912 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
3913 operand_stack_depth_ * kPointerSize;
3914 __ Sub(x0, fp, jssp);
3915 __ Cmp(x0, Operand(expected_diff));
3916 __ Assert(eq, kUnexpectedStackDepth);
3917 }
3918}
3919
3920void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
3921 Label allocate, done_allocate;
3922
3923 // Allocate and populate an object with this form: { value: VAL, done: DONE }
3924
3925 Register result = x0;
3926 __ Allocate(JSIteratorResult::kSize, result, x10, x11, &allocate, TAG_OBJECT);
3927 __ B(&done_allocate);
3928
3929 __ Bind(&allocate);
3930 __ Push(Smi::FromInt(JSIteratorResult::kSize));
3931 __ CallRuntime(Runtime::kAllocateInNewSpace);
3932
3933 __ Bind(&done_allocate);
3934 Register map_reg = x1;
3935 Register result_value = x2;
3936 Register boolean_done = x3;
3937 Register empty_fixed_array = x4;
3938 Register untagged_result = x5;
3939 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, map_reg);
3940 PopOperand(result_value);
3941 __ LoadRoot(boolean_done,
3942 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
3943 __ LoadRoot(empty_fixed_array, Heap::kEmptyFixedArrayRootIndex);
3944 STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
3945 JSObject::kElementsOffset);
3946 STATIC_ASSERT(JSIteratorResult::kValueOffset + kPointerSize ==
3947 JSIteratorResult::kDoneOffset);
3948 __ ObjectUntag(untagged_result, result);
3949 __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
3950 __ Stp(empty_fixed_array, empty_fixed_array,
3951 MemOperand(untagged_result, JSObject::kPropertiesOffset));
3952 __ Stp(result_value, boolean_done,
3953 MemOperand(untagged_result, JSIteratorResult::kValueOffset));
3954 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
3955}
3956
3957
3958// TODO(all): I don't like this method.
3959// It seems to me that in too many places x0 is used in place of this.
3960// Also, this function is not suitable for all places where x0 should be
3961// abstracted (eg. when used as an argument). But some places assume that the
3962// first argument register is x0, and use this function instead.
3963// Considering that most of the register allocation is hard-coded in the
3964// FullCodeGen, that it is unlikely we will need to change it extensively, and
3965// that abstracting the allocation through functions would not yield any
3966// performance benefit, I think the existence of this function is debatable.
3967Register FullCodeGenerator::result_register() {
3968 return x0;
3969}
3970
3971
3972Register FullCodeGenerator::context_register() {
3973 return cp;
3974}
3975
3976void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
3977 DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
3978 __ Ldr(value, MemOperand(fp, frame_offset));
3979}
3980
3981void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
3982 DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
3983 __ Str(value, MemOperand(fp, frame_offset));
3984}
3985
3986
3987void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
3988 __ Ldr(dst, ContextMemOperand(cp, context_index));
3989}
3990
3991
3992void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
3993 Scope* closure_scope = scope()->ClosureScope();
3994 if (closure_scope->is_script_scope() ||
3995 closure_scope->is_module_scope()) {
3996 // Contexts nested in the native context have a canonical empty function
3997 // as their closure, not the anonymous closure containing the global
3998 // code.
3999 DCHECK(kSmiTag == 0);
4000 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, x10);
4001 } else if (closure_scope->is_eval_scope()) {
4002 // Contexts created by a call to eval have the same closure as the
4003 // context calling eval, not the anonymous closure containing the eval
4004 // code. Fetch it from the context.
4005 __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
4006 } else {
4007 DCHECK(closure_scope->is_function_scope());
4008 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4009 }
4010 PushOperand(x10);
4011}
4012
4013
4014void FullCodeGenerator::EnterFinallyBlock() {
4015 ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
4016 DCHECK(!result_register().is(x10));
4017 // Store pending message while executing finally block.
4018 ExternalReference pending_message_obj =
4019 ExternalReference::address_of_pending_message_obj(isolate());
4020 __ Mov(x10, pending_message_obj);
4021 __ Ldr(x10, MemOperand(x10));
4022 PushOperand(x10);
4023
4024 ClearPendingMessage();
4025}
4026
4027
4028void FullCodeGenerator::ExitFinallyBlock() {
4029 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
4030 DCHECK(!result_register().is(x10));
4031
4032 // Restore pending message from stack.
4033 PopOperand(x10);
4034 ExternalReference pending_message_obj =
4035 ExternalReference::address_of_pending_message_obj(isolate());
4036 __ Mov(x13, pending_message_obj);
4037 __ Str(x10, MemOperand(x13));
4038}
4039
4040
4041void FullCodeGenerator::ClearPendingMessage() {
4042 DCHECK(!result_register().is(x10));
4043 ExternalReference pending_message_obj =
4044 ExternalReference::address_of_pending_message_obj(isolate());
4045 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
4046 __ Mov(x13, pending_message_obj);
4047 __ Str(x10, MemOperand(x13));
4048}
4049
4050
4051void FullCodeGenerator::DeferredCommands::EmitCommands() {
4052 __ Pop(result_register(), x1); // Restore the accumulator and get the token.
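// Each deferred command is guarded by a token comparison, so only the
// command whose token matches the one left by the try-finally machinery
// is executed; the others are skipped.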
4053 for (DeferredCommand cmd : commands_) {
4054 Label skip;
4055 __ Cmp(x1, Operand(Smi::FromInt(cmd.token)));
4056 __ B(ne, &skip);
4057 switch (cmd.command) {
4058 case kReturn:
4059 codegen_->EmitUnwindAndReturn();
4060 break;
4061 case kThrow:
4062 __ Push(result_register());
4063 __ CallRuntime(Runtime::kReThrow);
4064 break;
4065 case kContinue:
4066 codegen_->EmitContinue(cmd.target);
4067 break;
4068 case kBreak:
4069 codegen_->EmitBreak(cmd.target);
4070 break;
4071 }
4072 __ bind(&skip);
4073 }
4074}
4075
4076#undef __
4077
4078
4079void BackEdgeTable::PatchAt(Code* unoptimized_code,
4080 Address pc,
4081 BackEdgeState target_state,
4082 Code* replacement_code) {
4083 // Turn the jump into a nop.
4084 Address branch_address = pc - 3 * kInstructionSize;
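// A back-edge site ends with the sequence: b.pl/nop, ldr x16, <addr>,
// blr x16. The pc passed in points just past the blr, so the patchable
// branch (or nop) is three instructions back.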
4085 Isolate* isolate = unoptimized_code->GetIsolate();
4086 PatchingAssembler patcher(isolate, branch_address, 1);
4087
4088 DCHECK(Instruction::Cast(branch_address)
4089 ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
4090 (Instruction::Cast(branch_address)->IsCondBranchImm() &&
4091 Instruction::Cast(branch_address)->ImmPCOffset() ==
4092 6 * kInstructionSize));
4093
4094 switch (target_state) {
4095 case INTERRUPT:
4096 // <decrement profiling counter>
4097 // .. .. .. .. b.pl ok
4098 // .. .. .. .. ldr x16, pc+<interrupt stub address>
4099 // .. .. .. .. blr x16
4100 // ... more instructions.
4101 // ok-label
4102 // Jump offset is 6 instructions.
4103 patcher.b(6, pl);
4104 break;
4105 case ON_STACK_REPLACEMENT:
4106 // <decrement profiling counter>
4107 // .. .. .. .. mov x0, x0 (NOP)
4108 // .. .. .. .. ldr x16, pc+<on-stack replacement address>
4109 // .. .. .. .. blr x16
4110 patcher.nop(Assembler::INTERRUPT_CODE_NOP);
4111 break;
4112 }
4113
4114 // Replace the call address.
4115 Instruction* load = Instruction::Cast(pc)->preceding(2);
4116 Address interrupt_address_pointer =
4117 reinterpret_cast<Address>(load) + load->ImmPCOffset();
4118 DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
4119 reinterpret_cast<uint64_t>(
4120 isolate->builtins()->OnStackReplacement()->entry())) ||
4121 (Memory::uint64_at(interrupt_address_pointer) ==
4122 reinterpret_cast<uint64_t>(
4123 isolate->builtins()->InterruptCheck()->entry())) ||
4124 (Memory::uint64_at(interrupt_address_pointer) ==
4125 reinterpret_cast<uint64_t>(
4126 isolate->builtins()->OnStackReplacement()->entry())));
4127 Memory::uint64_at(interrupt_address_pointer) =
4128 reinterpret_cast<uint64_t>(replacement_code->entry());
4129
4130 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4131 unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
4132}
4133
4134
4135BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4136 Isolate* isolate,
4137 Code* unoptimized_code,
4138 Address pc) {
4139 // TODO(jbramley): There should be some extra assertions here (as in the ARM
4140 // back-end), but this function is gone in bleeding_edge so it might not
4141 // matter anyway.
4142 Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);
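// If the instruction three back is still a conditional branch, the interrupt
// check is active; if it has been patched to the marker nop, inspect the
// address loaded by the ldr to see which builtin is installed.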
4143
4144 if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
4145 Instruction* load = Instruction::Cast(pc)->preceding(2);
4146 uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
4147 load->ImmPCOffset());
4148 if (entry == reinterpret_cast<uint64_t>(
4149 isolate->builtins()->OnStackReplacement()->entry())) {
4150 return ON_STACK_REPLACEMENT;
4151 } else {
4152 UNREACHABLE();
4153 }
4154 }
4155
4156 return INTERRUPT;
4157}
4158
4159
4160} // namespace internal
4161} // namespace v8
4162
4163#endif // V8_TARGET_ARCH_ARM64