blob: be372e65a674e8856bcf981b0aaef97f30b00d45 [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_ARM64
6
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007#include "src/arm64/frames-arm64.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008#include "src/codegen.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009#include "src/debug/debug.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000010#include "src/deoptimizer.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011#include "src/full-codegen/full-codegen.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040012#include "src/runtime/runtime.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000013
14namespace v8 {
15namespace internal {
16
17
18#define __ ACCESS_MASM(masm)
19
20
// Load the built-in Array function from the current context into |result|.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}
26
27
// Load the built-in InternalArray function from the current context into
// |result|.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}
34
35
// Adaptor trampoline for builtins implemented in C++ (id identifies the C
// function). Optionally pushes the target and/or new target as extra
// arguments, then tail-calls into the runtime via JumpToExternalReference.
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments excluding receiver
  //  -- x1                 : target
  //  -- x3                 : new target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(x1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // Insert extra arguments (target and/or new target) below the existing
  // arguments, counting how many were added.
  int num_extra_args = 0;
  switch (extra_args) {
    case BuiltinExtraArguments::kTarget:
      __ Push(x1);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kNewTarget:
      __ Push(x3);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kTargetAndNewTarget:
      __ Push(x1, x3);
      num_extra_args += 2;
      break;
    case BuiltinExtraArguments::kNone:
      break;
  }

  // JumpToExternalReference expects x0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Add(x0, x0, num_extra_args + 1);

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
81
82
// Entry point used when the InternalArray function is called as a normal
// function; delegates the actual construction to InternalArrayConstructorStub.
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_InternalArrayCode");
  Label generic_array_code;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps
    // (i.e. a non-smi heap object of MAP_TYPE).
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
109
110
// Entry point used when the Array function is called as a normal function;
// delegates the actual construction to ArrayConstructorStub.
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ArrayCode");
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps
    // (i.e. a non-smi heap object of MAP_TYPE).
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  // x2 = undefined (no AllocationSite), x3 = new target = the function itself.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
  __ Mov(x3, x1);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
138
139
// static
// Implementation of Math.max / Math.min (selected by |kind|): folds all
// arguments with fmin/fmax into a double accumulator, converting non-number
// arguments via ToNumberStub, and returns the original tagged value (smi or
// HeapNumber) that produced the extreme.
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments
  //  -- lr                 : return address
  //  -- sp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- sp[(argc + 1) * 8] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_MathMaxMin");

  // The identity element: +Infinity for min, -Infinity for max.
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in x1 and the double value in d1.
  __ LoadRoot(x1, root_index);
  __ Ldr(d1, FieldMemOperand(x1, HeapNumber::kValueOffset));

  // Remember how many slots to drop (including the receiver).
  __ Add(x4, x0, 1);

  Label done_loop, loop;
  __ Bind(&loop);
  {
    // Check if all parameters done. x0 counts down and doubles as the stack
    // slot index of the next parameter.
    __ Subs(x0, x0, 1);
    __ B(lt, &done_loop);

    // Load the next parameter tagged value into x2.
    __ Peek(x2, Operand(x0, LSL, kPointerSizeLog2));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumberStub if necessary.
    Label convert_smi, convert_number, done_convert;
    __ JumpIfSmi(x2, &convert_smi);
    __ JumpIfHeapNumber(x2, &convert_number);
    {
      // Parameter is not a Number, use the ToNumberStub to convert it.
      // Preserve the loop counter (x0), accumulator (x1) and drop count (x4)
      // across the call; they must be smi-tagged while on the stack.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(x0);
      __ SmiTag(x4);
      __ Push(x0, x1, x4);
      __ Mov(x0, x2);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Mov(x2, x0);
      __ Pop(x4, x1, x0);
      {
        // Restore the double accumulator value (d1) from the tagged
        // accumulator (x1), which may be a smi or a HeapNumber.
        Label done_restore;
        __ SmiUntagToDouble(d1, x1, kSpeculativeUntag);
        __ JumpIfSmi(x1, &done_restore);
        __ Ldr(d1, FieldMemOperand(x1, HeapNumber::kValueOffset));
        __ Bind(&done_restore);
      }
      __ SmiUntag(x4);
      __ SmiUntag(x0);
    }
    __ AssertNumber(x2);
    __ JumpIfSmi(x2, &convert_smi);

    __ Bind(&convert_number);
    __ Ldr(d2, FieldMemOperand(x2, HeapNumber::kValueOffset));
    __ B(&done_convert);

    __ Bind(&convert_smi);
    __ SmiUntagToDouble(d2, x2);
    __ Bind(&done_convert);

    // We can use a single fmin/fmax for the operation itself, but we then need
    // to work out which HeapNumber (or smi) the result came from.
    __ Fmov(x11, d1);
    if (kind == MathMaxMinKind::kMin) {
      __ Fmin(d1, d1, d2);
    } else {
      DCHECK(kind == MathMaxMinKind::kMax);
      __ Fmax(d1, d1, d2);
    }
    __ Fmov(x10, d1);
    // If the result bit-pattern equals the old accumulator, keep x1;
    // otherwise the new parameter (x2) won, so select it.
    __ Cmp(x10, x11);
    __ Csel(x1, x1, x2, eq);
    __ B(&loop);
  }

  __ Bind(&done_loop);
  // Return the tagged winner and drop all arguments plus the receiver.
  __ Mov(x0, x1);
  __ Drop(x4);
  __ Ret();
}
230
// static
// Number(value) called as a function (not as a constructor): converts the
// first argument to a number, or returns +0 when called with no arguments.
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_NumberConstructor");

  // 1. Load the first argument into x0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    // Pops the first argument and the receiver in one post-indexed load.
    __ Ldr(x0, MemOperand(jssp, 2 * kPointerSize, PostIndex));
  }

  // 2a. Convert first argument to number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0 (already in x0, since argc == 0 is smi 0).
  __ Bind(&no_arguments);
  __ Drop(1);
  __ Ret();
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000261
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000262
// static
// new Number(value): converts the first argument to a number and wraps it in
// a JSValue, falling back to the runtime when new.target differs from the
// constructor or when inline allocation fails.
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- x3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_NumberConstructor_ConstructStub");

  // 1. Make sure we operate in the context of the called function.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // 2. Load the first argument into x2 and get rid of the rest (including the
  // receiver). With no arguments the wrapped value defaults to smi 0.
  {
    Label no_arguments, done;
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    __ Ldr(x2, MemOperand(jssp, 2 * kPointerSize, PostIndex));
    __ B(&done);
    __ Bind(&no_arguments);
    __ Drop(1);
    __ Mov(x2, Smi::FromInt(0));
    __ Bind(&done);
  }

  // 3. Make sure x2 is a number; convert via ToNumberStub otherwise,
  // preserving the constructor (x1) and new target (x3) across the call.
  {
    Label done_convert;
    __ JumpIfSmi(x2, &done_convert);
    __ JumpIfObjectType(x2, x4, x4, HEAP_NUMBER_TYPE, &done_convert, eq);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(x1, x3);
      __ Move(x0, x2);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Move(x2, x0);
      __ Pop(x3, x1);
    }
    __ Bind(&done_convert);
  }

  // 4. Check if new target and constructor differ (e.g. subclassing); if so,
  // the initial map must come from the new target, handled in the runtime.
  Label new_object;
  __ Cmp(x1, x3);
  __ B(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x2);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(x2);
  }
  __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
  __ Ret();
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000331
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000332
// static
// String(value) called as a function (not as a constructor): returns the
// first argument if it is already a string, converts it otherwise, or returns
// the empty string when called with no arguments.
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructor");

  // 1. Load the first argument into x0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    __ Ldr(x0, MemOperand(jssp, 2 * kPointerSize, PostIndex));
  }

  // 2a. At least one argument, return x0 if it's a string, otherwise
  // dispatch to appropriate conversion. Symbols get a descriptive string
  // rather than a plain ToString (which would throw).
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(x0, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(x0, x1, x1, FIRST_NONSTRING_TYPE);
    __ B(hi, &to_string);
    __ B(eq, &symbol_descriptive_string);
    __ Ret();
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ Bind(&no_arguments);
  {
    __ LoadRoot(x0, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ Ret();
  }

  // 3a. Convert x0 to a string.
  __ Bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in x0 to a string.
  __ Bind(&symbol_descriptive_string);
  {
    __ Push(x0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}
388
389
// static
// new String(value): converts the first argument to a string and wraps it in
// a JSValue, falling back to the runtime when new.target differs from the
// constructor or when inline allocation fails.
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- x3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructor_ConstructStub");

  // 1. Make sure we operate in the context of the called function.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // 2. Load the first argument into x2 and get rid of the rest (including the
  // receiver). With no arguments the wrapped value defaults to "".
  {
    Label no_arguments, done;
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    __ Ldr(x2, MemOperand(jssp, 2 * kPointerSize, PostIndex));
    __ B(&done);
    __ Bind(&no_arguments);
    __ Drop(1);
    __ LoadRoot(x2, Heap::kempty_stringRootIndex);
    __ Bind(&done);
  }

  // 3. Make sure x2 is a string; convert via ToStringStub otherwise,
  // preserving the constructor (x1) and new target (x3) across the call.
  {
    Label convert, done_convert;
    __ JumpIfSmi(x2, &convert);
    __ JumpIfObjectType(x2, x4, x4, FIRST_NONSTRING_TYPE, &done_convert, lo);
    __ Bind(&convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(x1, x3);
      __ Move(x0, x2);
      __ CallStub(&stub);
      __ Move(x2, x0);
      __ Pop(x3, x1);
    }
    __ Bind(&done_convert);
  }

  // 4. Check if new target and constructor differ (e.g. subclassing); if so,
  // the initial map must come from the new target, handled in the runtime.
  Label new_object;
  __ Cmp(x1, x3);
  __ B(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x2);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(x2);
  }
  __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
  __ Ret();
}
459
// Tail-call the code attached to the SharedFunctionInfo of the function in x1.
// Clobbers x2.
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
  // Skip the Code object header to reach the first instruction.
  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}
466
// Call |function_id| in the runtime (passing the target function as its one
// argument) and tail-call the Code object it returns, preserving the argument
// count, target function and new target for the callee.
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- x0 : argument count (preserved for callee)
  //  -- x1 : target function (preserved for callee)
  //  -- x3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target.
    // Push another copy as a parameter to the runtime call.
    __ SmiTag(x0);
    __ Push(x0, x1, x3, x1);

    __ CallRuntime(function_id, 1);
    __ Move(x2, x0);

    // Restore target function and new target.
    __ Pop(x3, x1, x0);
    __ SmiUntag(x0);
  }

  // Skip the Code object header to reach the first instruction.
  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}
492
493
// Entry point for a function whose optimized code is queued: opportunistically
// tries to install the optimized code, otherwise runs the shared (unoptimized)
// code.
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However, not
  // checking may delay installing ready functions, and always checking would be
  // quite expensive. A good compromise is to first check against stack limit as
  // a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
  __ B(hs, &ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ Bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
509
510
// Shared body of the JS construct stubs. Depending on the flags it:
// - allocates an implicit receiver (create_implicit_receiver) or pushes the
//   hole as receiver,
// - skips the deopt-PC bookkeeping for API functions (is_api_function),
// - verifies that a derived-class constructor returned an object
//   (check_derived_construct).
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- x1     : constructor function
  //  -- x2     : allocation site or undefined
  //  -- x3     : new target
  //  -- lr     : return address
  //  -- cp     : context pointer
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the four incoming parameters on the stack.
    Register argc = x0;
    Register constructor = x1;
    Register allocation_site = x2;
    Register new_target = x3;

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(allocation_site, x10);
    __ Push(cp);
    __ SmiTag(argc);
    __ Push(allocation_site, argc);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(constructor, new_target);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Mov(x4, x0);
      __ Pop(new_target, constructor);

      // ----------- S t a t e -------------
      //  -- x1: constructor function
      //  -- x3: new target
      //  -- x4: newly allocated object
      // -----------------------------------

      // Reload the number of arguments from the stack.
      // Set it up in x0 for the function call below.
      // jssp[0]: number of arguments (smi-tagged)
      __ Peek(argc, 0);  // Load number of arguments.
    }

    __ SmiUntag(argc);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(x4, x4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);

    // Copy arguments and receiver to the expression stack.
    // Copy 2 values every loop to use ldp/stp.
    // x0: number of arguments
    // x1: constructor function
    // x2: address of last argument (caller sp)
    // x3: new target
    // jssp[0]: receiver
    // jssp[1]: receiver
    // jssp[2]: number of arguments (smi-tagged)
    // Compute the start address of the copy in x4.
    __ Add(x4, x2, Operand(argc, LSL, kPointerSizeLog2));
    Label loop, entry, done_copying_arguments;
    __ B(&entry);
    __ Bind(&loop);
    __ Ldp(x10, x11, MemOperand(x4, -2 * kPointerSize, PreIndex));
    __ Push(x11, x10);
    __ Bind(&entry);
    __ Cmp(x4, x2);
    __ B(gt, &loop);
    // Because we copied values 2 by 2 we may have copied one extra value.
    // Drop it if that is the case.
    __ B(eq, &done_copying_arguments);
    __ Drop(1);
    __ Bind(&done_copying_arguments);

    // Call the function.
    // x0: number of arguments
    // x1: constructor function
    // x3: new target
    ParameterCount actual(argc);
    __ InvokeFunction(constructor, new_target, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore the context from the frame.
    // x0: result
    // jssp[0]: receiver
    // jssp[1]: number of arguments (smi-tagged)
    __ Ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // x0: result
      // jssp[0]: receiver (newly allocated object)
      // jssp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(x0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ JumpIfObjectType(x0, x1, x3, FIRST_JS_RECEIVER_TYPE, &exit, ge);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ Bind(&use_receiver);
      __ Peek(x0, 0);

      // Remove the receiver from the stack, remove caller arguments, and
      // return.
      __ Bind(&exit);
      // x0: result
      // jssp[0]: receiver (newly allocated object)
      // jssp[1]: number of arguments (smi-tagged)
      __ Peek(x1, 1 * kXRegSize);
    } else {
      __ Peek(x1, 0);
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(x0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ Bind(&dont_throw);
  }

  // Drop the caller arguments (x1 holds the smi-tagged argc) and the receiver.
  __ DropBySMI(x1);
  __ Drop(1);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2);
  }
  __ Ret();
}
678
679
// Generic construct stub: allocates an implicit receiver, no derived-class
// return check.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}
683
684
// Construct stub for API functions: no implicit receiver allocation, no
// derived-class return check.
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}
688
689
// Construct stub for builtins: no implicit receiver (the hole is pushed
// instead), no derived-class return check.
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}
693
694
// Construct stub for derived-class constructors: no implicit receiver, but
// checks that the constructor returned an object (ES6 9.2.2 step 13+).
void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}
699
700
// Called when a non-constructable target (in x1) was used with 'new':
// throws a TypeError via the runtime. Does not return.
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(x1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
706
// static
// Resumes a suspended generator: records the input value and resume mode on
// the generator object, then dispatches either to the new-style
// (Ignition/TurboFan) entry or re-creates the frame of an old-style
// (full-codegen) generator and jumps to its continuation.
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the value to pass to the generator
  //  -- x1 : the JSGeneratorObject to resume
  //  -- x2 : the resume mode (tagged)
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(x1);

  // Store input value into generator object (with a write barrier, since the
  // value may be a heap object).
  __ Str(x0, FieldMemOperand(x1, JSGeneratorObject::kInputOffset));
  __ RecordWriteField(x1, JSGeneratorObject::kInputOffset, x0, x3,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ Str(x2, FieldMemOperand(x1, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ Ldr(cp, FieldMemOperand(x1, JSGeneratorObject::kContextOffset));
  __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping (debugger active); the runtime call may
  // move objects, so reload the function afterwards.
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(masm->isolate());
  __ Mov(x10, Operand(step_in_enabled));
  __ Ldrb(x10, MemOperand(x10));
  __ CompareAndBranch(x10, Operand(0), eq, &skip_flooding);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x1, x2, x4);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(x2, x1);
    __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
  }
  __ bind(&skip_flooding);

  // Push receiver.
  __ Ldr(x5, FieldMemOperand(x1, JSGeneratorObject::kReceiverOffset));
  __ Push(x5);

  // ----------- S t a t e -------------
  //  -- x1      : the JSGeneratorObject to resume
  //  -- x2      : the resume mode (tagged)
  //  -- x4      : generator function
  //  -- cp      : generator context
  //  -- lr      : return address
  //  -- jssp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ Ldr(x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(w10,
         FieldMemOperand(x10, SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(x11, Heap::kTheHoleValueRootIndex);
  __ PushMultipleTimes(x11, w10);

  // Dispatch on the kind of generator object: a BYTECODE_ARRAY function-data
  // slot means a new-style (Ignition) generator.
  Label old_generator;
  __ Ldr(x3, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(x3, x3, x3, BYTECODE_ARRAY_TYPE);
  __ B(ne, &old_generator);

  // New-style (ignition/turbofan) generator object
  {
    __ Ldr(x0, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(x0,
         FieldMemOperand(x0, SharedFunctionInfo::kFormalParameterCountOffset));
    __ SmiUntag(x0);
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(x3, x1);
    __ Move(x1, x4);
    __ Ldr(x5, FieldMemOperand(x1, JSFunction::kCodeEntryOffset));
    __ Jump(x5);
  }

  // Old-style (full-codegen) generator object
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(lr, fp);
    __ Move(fp, jssp);
    __ Push(cp, x4);

    // Restore the operand stack by pushing every element of the saved
    // FixedArray back onto the expression stack.
    __ Ldr(x0, FieldMemOperand(x1, JSGeneratorObject::kOperandStackOffset));
    __ Ldr(w3, UntagSmiFieldMemOperand(x0, FixedArray::kLengthOffset));
    __ Add(x0, x0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ Add(x3, x0, Operand(x3, LSL, kPointerSizeLog2));
    {
      Label done_loop, loop;
      __ Bind(&loop);
      __ Cmp(x0, x3);
      __ B(eq, &done_loop);
      __ Ldr(x10, MemOperand(x0, kPointerSize, PostIndex));
      __ Push(x10);
      __ B(&loop);
      __ Bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(x10, Heap::kEmptyFixedArrayRootIndex);
    __ Str(x10, FieldMemOperand(x1, JSGeneratorObject::kOperandStackOffset));

    // Resume the generator function at the continuation: code start plus the
    // saved continuation offset, marking the generator as executing first.
    __ Ldr(x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(x10, FieldMemOperand(x10, SharedFunctionInfo::kCodeOffset));
    __ Add(x10, x10, Code::kHeaderSize - kHeapObjectTag);
    __ Ldrsw(x11,
             UntagSmiFieldMemOperand(x1, JSGeneratorObject::kContinuationOffset));
    __ Add(x10, x10, x11);
    __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
    __ Str(x12, FieldMemOperand(x1, JSGeneratorObject::kContinuationOffset));
    __ Move(x0, x1);  // Continuation expects generator object in x0.
    __ Br(x10);
  }
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000833
// Describes how an argument-count register is encoded when passed to
// Generate_CheckStackOverflow: either as a Smi-tagged value or as a raw
// untagged integer.
enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
835
836
// Emits a stack-overflow check for pushing |argc| arguments.
// Falls through when there is enough space; otherwise calls
// Runtime::kThrowStackOverflow, which does not return.
// Clobbers x10; preserves all other registers.
// NOTE(review): the original comment also listed x15 as clobbered, but the
// visible code only touches x10 — confirm against callers before relying on
// x15 being free here.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow.
  // We are not trying to catch interruptions (e.g. debug break and
  // preemption) here, so the "real stack limit" is checked.
  Label enough_stack_space;
  __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
  // Make x10 the space we have left. The stack might already be overflowed
  // here which will cause x10 to become negative.
  // TODO(jbramley): Check that the stack usage here is safe.
  __ Sub(x10, jssp, x10);
  // Check if the arguments will overflow the stack. The argument size in
  // bytes is argc scaled by the pointer size, with the Smi tag stripped
  // first when the count is Smi-tagged.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ B(gt, &enough_stack_space);
  __ CallRuntime(Runtime::kThrowStackOverflow);
  // kThrowStackOverflow never returns, so anything after the call is
  // unreachable; assert that in debug builds.
  if (__ emit_debug_code()) {
    __ Unreachable();
  }

  __ Bind(&enough_stack_space);
}
865
866
// Shared body of the JS entry trampolines: sets up the caller context, pushes
// the function, receiver and arguments, and transfers control to either the
// Call or the Construct builtin.
//
// Input:
//   x0: new.target.
//   x1: function.
//   x2: receiver.
//   x3: argc.
//   x4: argv.
// Output:
//   x0: result.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody().
  Register new_target = x0;
  Register function = x1;
  Register receiver = x2;
  Register argc = x3;
  Register argv = x4;
  Register scratch = x10;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  {
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    __ Mov(scratch, Operand(ExternalReference(Isolate::kContextAddress,
                                              masm->isolate())));
    __ Ldr(cp, MemOperand(scratch));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(function, receiver);

    // Check if we have enough stack space to push all arguments.
    // Expects the untagged argument count in x3 (argc). Clobbers x10 (see
    // Generate_CheckStackOverflow).
    Generate_CheckStackOverflow(masm, argc, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop, in reverse order.
    // x3: argc.
    // x4: argv.
    Label loop, entry;
    // Compute the copy end address.
    __ Add(scratch, argv, Operand(argc, LSL, kPointerSizeLog2));

    __ B(&entry);
    __ Bind(&loop);
    __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
    __ Ldr(x12, MemOperand(x11));  // Dereference the handle.
    __ Push(x12);  // Push the argument.
    __ Bind(&entry);
    __ Cmp(scratch, argv);
    __ B(ne, &loop);

    // Swap argc and new.target into the registers the Call/Construct
    // builtins expect: x0 = argc, x3 = new.target.
    __ Mov(scratch, argc);
    __ Mov(argc, new_target);
    __ Mov(new_target, scratch);
    // x0: argc.
    // x3: new.target.

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    // The original values have been saved in JSEntryStub::GenerateBody().
    __ LoadRoot(x19, Heap::kUndefinedValueRootIndex);
    __ Mov(x20, x19);
    __ Mov(x21, x19);
    __ Mov(x22, x19);
    __ Mov(x23, x19);
    __ Mov(x24, x19);
    __ Mov(x25, x19);
    // Don't initialize the reserved registers.
    // x26 : root register (root).
    // x27 : context pointer (cp).
    // x28 : JS stack pointer (jssp).
    // x29 : frame pointer (fp).

    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS internal frame and remove the parameters (except function),
    // and return.
  }

  // Result is in x0. Return.
  __ Ret();
}
955
956
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  // Regular (non-construct) entry: dispatches through the Call builtin.
  Generate_JSEntryTrampolineHelper(masm, false);
}
960
961
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  // Construct entry: dispatches through the Construct builtin.
  Generate_JSEntryTrampolineHelper(masm, true);
}
965
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000966// Generate code for entering a JS function with the interpreter.
967// On entry to the function the receiver and arguments have been pushed on the
968// stack left to right. The actual argument count matches the formal parameter
969// count expected by the function.
970//
971// The live registers are:
972// - x1: the JS function object being called.
973// - x3: the new target
974// - cp: our context.
975// - fp: our caller's frame pointer.
976// - jssp: stack pointer.
977// - lr: return address.
978//
Ben Murdoch097c5b22016-05-18 11:27:45 +0100979// The function builds an interpreter frame. See InterpreterFrameConstants in
980// frames.h for its layout.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000981void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
Ben Murdochc5610432016-08-08 18:44:38 +0100982 ProfileEntryHookStub::MaybeCallEntryHook(masm);
983
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000984 // Open a frame scope to indicate that there is a frame on the stack. The
985 // MANUAL indicates that the scope shouldn't actually generate code to set up
986 // the frame (that is done below).
987 FrameScope frame_scope(masm, StackFrame::MANUAL);
988 __ Push(lr, fp, cp, x1);
989 __ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000990
Ben Murdochc5610432016-08-08 18:44:38 +0100991 // Get the bytecode array from the function object (or from the DebugInfo if
992 // it is present) and load it into kInterpreterBytecodeArrayRegister.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000993 __ Ldr(x0, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100994 Register debug_info = kInterpreterBytecodeArrayRegister;
995 Label load_debug_bytecode_array, bytecode_array_loaded;
996 DCHECK(!debug_info.is(x0));
997 __ Ldr(debug_info, FieldMemOperand(x0, SharedFunctionInfo::kDebugInfoOffset));
998 __ Cmp(debug_info, Operand(DebugInfo::uninitialized()));
999 __ B(ne, &load_debug_bytecode_array);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001000 __ Ldr(kInterpreterBytecodeArrayRegister,
1001 FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001002 __ Bind(&bytecode_array_loaded);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001003
Ben Murdochc5610432016-08-08 18:44:38 +01001004 // Check function data field is actually a BytecodeArray object.
1005 Label bytecode_array_not_present;
1006 __ CompareRoot(kInterpreterBytecodeArrayRegister,
1007 Heap::kUndefinedValueRootIndex);
1008 __ B(eq, &bytecode_array_not_present);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001009 if (FLAG_debug_code) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001010 __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
1011 kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1012 __ CompareObjectType(kInterpreterBytecodeArrayRegister, x0, x0,
1013 BYTECODE_ARRAY_TYPE);
1014 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1015 }
1016
Ben Murdochc5610432016-08-08 18:44:38 +01001017 // Load the initial bytecode offset.
1018 __ Mov(kInterpreterBytecodeOffsetRegister,
1019 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
1020
1021 // Push new.target, bytecode array and Smi tagged bytecode array offset.
1022 __ SmiTag(x0, kInterpreterBytecodeOffsetRegister);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001023 __ Push(x3, kInterpreterBytecodeArrayRegister, x0);
1024
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001025 // Allocate the local and temporary register file on the stack.
1026 {
1027 // Load frame size from the BytecodeArray object.
1028 __ Ldr(w11, FieldMemOperand(kInterpreterBytecodeArrayRegister,
1029 BytecodeArray::kFrameSizeOffset));
1030
1031 // Do a stack check to ensure we don't go over the limit.
1032 Label ok;
1033 DCHECK(jssp.Is(__ StackPointer()));
1034 __ Sub(x10, jssp, Operand(x11));
1035 __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
1036 __ B(hs, &ok);
1037 __ CallRuntime(Runtime::kThrowStackOverflow);
1038 __ Bind(&ok);
1039
1040 // If ok, push undefined as the initial value for all register file entries.
1041 // Note: there should always be at least one stack slot for the return
1042 // register in the register file.
1043 Label loop_header;
1044 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
1045 // TODO(rmcilroy): Ensure we always have an even number of registers to
1046 // allow stack to be 16 bit aligned (and remove need for jssp).
1047 __ Lsr(x11, x11, kPointerSizeLog2);
1048 __ PushMultipleTimes(x10, x11);
1049 __ Bind(&loop_header);
1050 }
1051
Ben Murdochc5610432016-08-08 18:44:38 +01001052 // Load accumulator and dispatch table into registers.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001053 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001054 __ Mov(kInterpreterDispatchTableRegister,
1055 Operand(ExternalReference::interpreter_dispatch_table_address(
1056 masm->isolate())));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001057
1058 // Dispatch to the first bytecode handler for the function.
1059 __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
1060 kInterpreterBytecodeOffsetRegister));
1061 __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
1062 __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001063 __ Call(ip0);
Ben Murdochc5610432016-08-08 18:44:38 +01001064 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
Ben Murdoch097c5b22016-05-18 11:27:45 +01001065
Ben Murdochc5610432016-08-08 18:44:38 +01001066 // The return value is in x0.
1067
1068 // Get the arguments + reciever count.
1069 __ ldr(x1, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1070 __ Ldr(w1, FieldMemOperand(x1, BytecodeArray::kParameterSizeOffset));
1071
1072 // Leave the frame (also dropping the register file).
1073 __ LeaveFrame(StackFrame::JAVA_SCRIPT);
1074
1075 // Drop receiver + arguments and return.
1076 __ Drop(x1, 1);
1077 __ Ret();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001078
1079 // Load debug copy of the bytecode array.
1080 __ Bind(&load_debug_bytecode_array);
1081 __ Ldr(kInterpreterBytecodeArrayRegister,
1082 FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
1083 __ B(&bytecode_array_loaded);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001084
Ben Murdochc5610432016-08-08 18:44:38 +01001085 // If the bytecode array is no longer present, then the underlying function
1086 // has been switched to a different kind of code and we heal the closure by
1087 // switching the code entry field over to the new code object as well.
1088 __ Bind(&bytecode_array_not_present);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001089 __ LeaveFrame(StackFrame::JAVA_SCRIPT);
Ben Murdochc5610432016-08-08 18:44:38 +01001090 __ Ldr(x7, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
1091 __ Ldr(x7, FieldMemOperand(x7, SharedFunctionInfo::kCodeOffset));
1092 __ Add(x7, x7, Operand(Code::kHeaderSize - kHeapObjectTag));
1093 __ Str(x7, FieldMemOperand(x1, JSFunction::kCodeEntryOffset));
1094 __ RecordWriteCodeEntryField(x1, x7, x5);
1095 __ Jump(x7);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001096}
1097
Ben Murdochc5610432016-08-08 18:44:38 +01001098// static
1099void Builtins::Generate_InterpreterPushArgsAndCallImpl(
1100 MacroAssembler* masm, TailCallMode tail_call_mode) {
1101 // ----------- S t a t e -------------
1102 // -- x0 : the number of arguments (not including the receiver)
1103 // -- x2 : the address of the first argument to be pushed. Subsequent
1104 // arguments should be consecutive above this, in the same order as
1105 // they are to be pushed onto the stack.
1106 // -- x1 : the target to call (can be any Object).
1107 // -----------------------------------
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001108
Ben Murdochc5610432016-08-08 18:44:38 +01001109 // Find the address of the last argument.
1110 __ add(x3, x0, Operand(1)); // Add one for receiver.
1111 __ lsl(x3, x3, kPointerSizeLog2);
1112 __ sub(x4, x2, x3);
1113
1114 // Push the arguments.
1115 Label loop_header, loop_check;
1116 __ Mov(x5, jssp);
1117 __ Claim(x3, 1);
1118 __ B(&loop_check);
1119 __ Bind(&loop_header);
1120 // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
1121 __ Ldr(x3, MemOperand(x2, -kPointerSize, PostIndex));
1122 __ Str(x3, MemOperand(x5, -kPointerSize, PreIndex));
1123 __ Bind(&loop_check);
1124 __ Cmp(x2, x4);
1125 __ B(gt, &loop_header);
1126
1127 // Call the target.
1128 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
1129 tail_call_mode),
1130 RelocInfo::CODE_TARGET);
1131}
1132
// static
// Pushes a hole for the (yet to be constructed) receiver followed by the
// arguments starting at x2, then tail-calls the Construct builtin.
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : argument count (not including receiver)
  //  -- x3 : new target
  //  -- x1 : constructor to call
  //  -- x2 : address of the first argument
  // -----------------------------------

  // Find the address of the last argument.
  __ add(x5, x0, Operand(1));  // Add one for receiver (to be constructed).
  __ lsl(x5, x5, kPointerSizeLog2);

  // Set stack pointer and where to stop.
  __ Mov(x6, jssp);
  __ Claim(x5, 1);
  __ sub(x4, x6, x5);

  // Push a slot for the receiver.
  __ Str(xzr, MemOperand(x6, -kPointerSize, PreIndex));

  Label loop_header, loop_check;
  // Push the arguments.
  __ B(&loop_check);
  __ Bind(&loop_header);
  // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
  __ Ldr(x5, MemOperand(x2, -kPointerSize, PostIndex));
  __ Str(x5, MemOperand(x6, -kPointerSize, PreIndex));
  __ Bind(&loop_check);
  __ Cmp(x6, x4);
  __ B(gt, &loop_header);

  // Call the constructor with x0, x1, and x3 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
1168
// Re-enters the interpreter at the bytecode offset stored in the current
// interpreter frame, e.g. after a deopt or debugger redirect. Fakes the
// return address so that returning handlers come back into the interpreter
// entry trampoline at the right point.
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ LoadObject(x1, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ Add(lr, x1, Operand(interpreter_entry_return_pc_offset->value() +
                         Code::kHeaderSize - kHeapObjectTag));

  // Initialize the dispatch table register.
  __ Mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ Ldr(kInterpreterBytecodeArrayRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
                    kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, x1, x1,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ Ldr(kInterpreterBytecodeOffsetRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode: load the bytecode, scale it to a
  // dispatch-table index and jump to the handler.
  __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
  __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
  __ Jump(ip0);
}
1209
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001210void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
Ben Murdochc5610432016-08-08 18:44:38 +01001211 // ----------- S t a t e -------------
1212 // -- x0 : argument count (preserved for callee)
1213 // -- x3 : new target (preserved for callee)
1214 // -- x1 : target function (preserved for callee)
1215 // -----------------------------------
1216 // First lookup code, maybe we don't need to compile!
1217 Label gotta_call_runtime;
1218 Label maybe_call_runtime;
1219 Label try_shared;
1220 Label loop_top, loop_bottom;
1221
1222 Register closure = x1;
1223 Register map = x13;
1224 Register index = x2;
1225 __ Ldr(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1226 __ Ldr(map,
1227 FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
1228 __ Ldrsw(index, UntagSmiFieldMemOperand(map, FixedArray::kLengthOffset));
1229 __ Cmp(index, Operand(2));
1230 __ B(lt, &gotta_call_runtime);
1231
1232 // Find literals.
1233 // x3 : native context
1234 // x2 : length / index
1235 // x13 : optimized code map
1236 // stack[0] : new target
1237 // stack[4] : closure
1238 Register native_context = x4;
1239 __ Ldr(native_context, NativeContextMemOperand());
1240
1241 __ Bind(&loop_top);
1242 Register temp = x5;
1243 Register array_pointer = x6;
1244
1245 // Does the native context match?
1246 __ Add(array_pointer, map, Operand(index, LSL, kPointerSizeLog2));
1247 __ Ldr(temp, FieldMemOperand(array_pointer,
1248 SharedFunctionInfo::kOffsetToPreviousContext));
1249 __ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
1250 __ Cmp(temp, native_context);
1251 __ B(ne, &loop_bottom);
1252 // OSR id set to none?
1253 __ Ldr(temp, FieldMemOperand(array_pointer,
1254 SharedFunctionInfo::kOffsetToPreviousOsrAstId));
1255 const int bailout_id = BailoutId::None().ToInt();
1256 __ Cmp(temp, Operand(Smi::FromInt(bailout_id)));
1257 __ B(ne, &loop_bottom);
1258 // Literals available?
1259 __ Ldr(temp, FieldMemOperand(array_pointer,
1260 SharedFunctionInfo::kOffsetToPreviousLiterals));
1261 __ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
1262 __ JumpIfSmi(temp, &gotta_call_runtime);
1263
1264 // Save the literals in the closure.
1265 __ Str(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset));
1266 __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, x7,
1267 kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1268 OMIT_SMI_CHECK);
1269
1270 // Code available?
1271 Register entry = x7;
1272 __ Ldr(entry,
1273 FieldMemOperand(array_pointer,
1274 SharedFunctionInfo::kOffsetToPreviousCachedCode));
1275 __ Ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
1276 __ JumpIfSmi(entry, &maybe_call_runtime);
1277
1278 // Found literals and code. Get them into the closure and return.
1279 __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1280
1281 Label install_optimized_code_and_tailcall;
1282 __ Bind(&install_optimized_code_and_tailcall);
1283 __ Str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
1284 __ RecordWriteCodeEntryField(closure, entry, x5);
1285
1286 // Link the closure into the optimized function list.
1287 // x7 : code entry
1288 // x4 : native context
1289 // x1 : closure
1290 __ Ldr(x8,
1291 ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1292 __ Str(x8, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
1293 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, x8, x13,
1294 kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1295 OMIT_SMI_CHECK);
1296 const int function_list_offset =
1297 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
1298 __ Str(closure,
1299 ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1300 __ Mov(x5, closure);
1301 __ RecordWriteContextSlot(native_context, function_list_offset, x5, x13,
1302 kLRHasNotBeenSaved, kDontSaveFPRegs);
1303 __ Jump(entry);
1304
1305 __ Bind(&loop_bottom);
1306 __ Sub(index, index, Operand(SharedFunctionInfo::kEntryLength));
1307 __ Cmp(index, Operand(1));
1308 __ B(gt, &loop_top);
1309
1310 // We found neither literals nor code.
1311 __ B(&gotta_call_runtime);
1312
1313 __ Bind(&maybe_call_runtime);
1314
1315 // Last possibility. Check the context free optimized code map entry.
1316 __ Ldr(entry, FieldMemOperand(map, FixedArray::kHeaderSize +
1317 SharedFunctionInfo::kSharedCodeIndex));
1318 __ Ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
1319 __ JumpIfSmi(entry, &try_shared);
1320
1321 // Store code entry in the closure.
1322 __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1323 __ B(&install_optimized_code_and_tailcall);
1324
1325 __ Bind(&try_shared);
1326 // Is the full code valid?
1327 __ Ldr(entry,
1328 FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1329 __ Ldr(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
1330 __ Ldr(x5, FieldMemOperand(entry, Code::kFlagsOffset));
1331 __ and_(x5, x5, Operand(Code::KindField::kMask));
1332 __ Mov(x5, Operand(x5, LSR, Code::KindField::kShift));
1333 __ Cmp(x5, Operand(Code::BUILTIN));
1334 __ B(eq, &gotta_call_runtime);
1335 // Yes, install the full code.
1336 __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1337 __ Str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
1338 __ RecordWriteCodeEntryField(closure, entry, x5);
1339 __ Jump(entry);
1340
1341 __ Bind(&gotta_call_runtime);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001342 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001343}
1344
Ben Murdochc5610432016-08-08 18:44:38 +01001345void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
1346 GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
1347}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001348
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001349void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001350 GenerateTailCallToReturnedCode(masm,
1351 Runtime::kCompileOptimized_NotConcurrent);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001352}
1353
1354
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  // Kick off a concurrent optimization job and tail-call the returned code.
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
1358
1359
// Shared helper for the Make*CodeYoungAgain* builtins: calls the C function
// that resets the code-age of the calling code object, then resumes execution
// at the original (now-young) code.
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code fast, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   x3 - new target
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, x3, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
    __ Pop(lr, fp, x3, x1, x0);
  }

  // The calling function has been made young again, so return to execute the
  // real frame set-up code.
  __ Br(x0);
}
1388
// Stamps out the Make<Age>CodeYoungAgain{Even,Odd}Marking builtins for every
// code age in CODE_AGE_LIST; each one simply defers to
// GenerateMakeCodeYoungAgainCommon above.
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
    MacroAssembler* masm) { \
  GenerateMakeCodeYoungAgainCommon(masm); \
} \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
    MacroAssembler* masm) { \
  GenerateMakeCodeYoungAgainCommon(masm); \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1400
1401
// Marks the calling code object as executed once, then re-runs the prologue
// that the code-age stub replaced and resumes after it.
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   x3 - new target
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, x3, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(
            masm->isolate()), 2);
    __ Pop(lr, fp, x3, x1, x0);

    // Perform prologue operations usually performed by the young code stub.
    __ EmitFrameSetupForCodeAgePatching(masm);
  }

  // Jump to point after the code-age stub.
  __ Add(x0, x0, kNoCodeAgeSequenceLength);
  __ Br(x0);
}
1433
1434
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  // Executed-twice code is simply made young again.
  GenerateMakeCodeYoungAgainCommon(masm);
}
1438
1439
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001440void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1441 Generate_MarkCodeAsExecutedOnce(masm);
1442}
1443
1444
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001445static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1446 SaveFPRegsMode save_doubles) {
1447 {
1448 FrameScope scope(masm, StackFrame::INTERNAL);
1449
1450 // Preserve registers across notification, this is important for compiled
1451 // stubs that tail call the runtime on deopts passing their parameters in
1452 // registers.
1453 // TODO(jbramley): Is it correct (and appropriate) to use safepoint
1454 // registers here? According to the comment above, we should only need to
1455 // preserve the registers with parameters.
1456 __ PushXRegList(kSafepointSavedRegisters);
1457 // Pass the function and deoptimization type to the runtime system.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001458 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001459 __ PopXRegList(kSafepointSavedRegisters);
1460 }
1461
1462 // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
1463 __ Drop(1);
1464
1465 // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this
1466 // into lr before it jumps here.
1467 __ Br(lr);
1468}
1469
1470
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  // Variant that does not preserve floating-point registers.
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
1474
1475
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  // Variant that also preserves floating-point registers.
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
1479
1480
// Notifies the runtime of a deoptimization of the given bailout |type|, then
// unwinds according to the full-codegen state the deoptimizer left on the
// stack (with or without a top-of-stack register to restore into x0).
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
    __ Push(x0);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it.
  Register state = x6;
  __ Peek(state, 0);
  __ SmiUntag(state);

  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ CompareAndBranch(state,
                      static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS),
                      ne, &with_tos_register);
  __ Drop(1);  // Remove state.
  __ Ret();

  __ Bind(&with_tos_register);
  // Reload TOS register; the deoptimizer stored it above the state slot and
  // the interpreter expects the accumulator (x0) to hold it.
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), x0.code());
  __ Peek(x0, kPointerSize);
  __ CompareAndBranch(state,
                      static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER),
                      ne, &unknown_state);
  __ Drop(2);  // Remove state and TOS.
  __ Ret();

  // Any other state value is a bug.
  __ Bind(&unknown_state);
  __ Abort(kInvalidFullCodegenState);
}
1517
1518
// Entry point for eager deoptimization bailouts.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
1522
1523
// Entry point for lazy deoptimization bailouts.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
1527
1528
// Entry point for soft deoptimization bailouts.
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
1532
1533
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001534static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
1535 Register function_template_info,
1536 Register scratch0, Register scratch1,
1537 Register scratch2,
1538 Label* receiver_check_failed) {
1539 Register signature = scratch0;
1540 Register map = scratch1;
1541 Register constructor = scratch2;
1542
1543 // If there is no signature, return the holder.
1544 __ Ldr(signature, FieldMemOperand(function_template_info,
1545 FunctionTemplateInfo::kSignatureOffset));
1546 __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
1547 Label receiver_check_passed;
1548 __ B(eq, &receiver_check_passed);
1549
1550 // Walk the prototype chain.
1551 __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1552 Label prototype_loop_start;
1553 __ Bind(&prototype_loop_start);
1554
1555 // Get the constructor, if any
1556 __ GetMapConstructor(constructor, map, x16, x16);
1557 __ cmp(x16, Operand(JS_FUNCTION_TYPE));
1558 Label next_prototype;
1559 __ B(ne, &next_prototype);
1560 Register type = constructor;
1561 __ Ldr(type,
1562 FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
1563 __ Ldr(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
1564
1565 // Loop through the chain of inheriting function templates.
1566 Label function_template_loop;
1567 __ Bind(&function_template_loop);
1568
1569 // If the signatures match, we have a compatible receiver.
1570 __ Cmp(signature, type);
1571 __ B(eq, &receiver_check_passed);
1572
1573 // If the current type is not a FunctionTemplateInfo, load the next prototype
1574 // in the chain.
1575 __ JumpIfSmi(type, &next_prototype);
1576 __ CompareObjectType(type, x16, x17, FUNCTION_TEMPLATE_INFO_TYPE);
1577 __ B(ne, &next_prototype);
1578
1579 // Otherwise load the parent function template and iterate.
1580 __ Ldr(type,
1581 FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
1582 __ B(&function_template_loop);
1583
1584 // Load the next prototype.
1585 __ Bind(&next_prototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001586 __ Ldr(x16, FieldMemOperand(map, Map::kBitField3Offset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001587 __ Tst(x16, Operand(Map::HasHiddenPrototype::kMask));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001588 __ B(eq, receiver_check_failed);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001589 __ Ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
1590 __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001591 // Iterate.
1592 __ B(&prototype_loop_start);
1593
1594 __ Bind(&receiver_check_passed);
1595}
1596
1597
1598void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
1599 // ----------- S t a t e -------------
1600 // -- x0 : number of arguments excluding receiver
1601 // -- x1 : callee
1602 // -- lr : return address
1603 // -- sp[0] : last argument
1604 // -- ...
1605 // -- sp[8 * (argc - 1)] : first argument
1606 // -- sp[8 * argc] : receiver
1607 // -----------------------------------
1608
1609 // Load the FunctionTemplateInfo.
1610 __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
1611 __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));
1612
1613 // Do the compatible receiver check.
1614 Label receiver_check_failed;
1615 __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
1616 CompatibleReceiverCheck(masm, x2, x3, x4, x5, x6, &receiver_check_failed);
1617
1618 // Get the callback offset from the FunctionTemplateInfo, and jump to the
1619 // beginning of the code.
1620 __ Ldr(x4, FieldMemOperand(x3, FunctionTemplateInfo::kCallCodeOffset));
1621 __ Ldr(x4, FieldMemOperand(x4, CallHandlerInfo::kFastHandlerOffset));
1622 __ Add(x4, x4, Operand(Code::kHeaderSize - kHeapObjectTag));
1623 __ Jump(x4);
1624
1625 // Compatible receiver check failed: throw an Illegal Invocation exception.
1626 __ Bind(&receiver_check_failed);
1627 // Drop the arguments (including the receiver)
1628 __ add(x0, x0, Operand(1));
1629 __ Drop(x0);
1630 __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
1631}
1632
1633
// On-stack replacement: asks the runtime to compile an optimized version
// of the function in the current JavaScript frame, then "returns" directly
// into that code's OSR entry point. If compilation yields no code (Smi 0),
// simply returns to the unoptimized code.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(x0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CompareAndBranch(x0, Smi::FromInt(0), ne, &skip);
  __ Ret();

  __ Bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ Ldrsw(w1, UntagSmiFieldMemOperand(x1, FixedArray::OffsetOfElementAt(
                   DeoptimizationInputData::kOsrPcOffsetIndex)));

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ Add(x0, x0, x1);
  __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function by loading the
  // computed entry address into lr and executing a return.
  __ Ret();
}
1668
1669
// static
// Emits the body of a Date.prototype field getter for the date field
// identified by field_index. Cached fields are read straight off the
// JSDate object when the global date-cache stamp matches the object's
// stamp; otherwise the value is recomputed via the C++ date-field
// function. Throws a TypeError if the receiver is not a JSDate.
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- lr      : return address
  //  -- jssp[0] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_DatePrototype_GetField");

  // 1. Pop receiver into x0 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(x0);
    __ JumpIfSmi(x0, &receiver_not_date);
    __ JumpIfNotObjectType(x0, x1, x2, JS_DATE_TYPE, &receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    // The primitive date value is always stored directly on the object.
    __ Ldr(x0, FieldMemOperand(x0, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      // Fast path: serve the field from the object's cache if the cache
      // stamp is still current.
      Label stamp_mismatch;
      __ Mov(x1, ExternalReference::date_cache_stamp(masm->isolate()));
      __ Ldr(x1, MemOperand(x1));
      __ Ldr(x2, FieldMemOperand(x0, JSDate::kCacheStampOffset));
      __ Cmp(x1, x2);
      __ B(ne, &stamp_mismatch);
      __ Ldr(x0, FieldMemOperand(
                     x0, JSDate::kValueOffset + field_index * kPointerSize));
      __ Ret();
      __ Bind(&stamp_mismatch);
    }
    // Slow path: call the C++ helper with (date, field_index as Smi).
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Mov(x1, Smi::FromInt(field_index));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ Bind(&receiver_not_date);
  __ TailCallRuntime(Runtime::kThrowNotDateError);
}
1714
// static
// Implements Function.prototype.apply: normalizes the (receiver, thisArg,
// argArray) stack layout, checks that the receiver is callable, then
// dispatches to the Apply builtin (or to Call with zero arguments when
// argArray is null/undefined).
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0       : argc
  //  -- jssp[0]  : argArray (if argc == 2)
  //  -- jssp[8]  : thisArg (if argc >= 1)
  //  -- jssp[16] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_FunctionPrototypeApply");

  // Note: argc and arg_array alias x0 — argc is consumed by the Pop below.
  Register argc = x0;
  Register arg_array = x0;
  Register receiver = x1;
  Register this_arg = x2;
  Register undefined_value = x3;
  Register null_value = x4;

  __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);

  // 1. Load receiver into x1, argArray into x0 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    // Claim (2 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
    __ Claim(2);
    __ Drop(argc);

    // ----------- S t a t e -------------
    //  -- x0       : argc
    //  -- jssp[0]  : argArray (dummy value if argc <= 1)
    //  -- jssp[8]  : thisArg (dummy value if argc == 0)
    //  -- jssp[16] : receiver
    // -----------------------------------
    // The Cmp sets the flags consumed by both CmovX instructions below,
    // so no instruction in between may clobber the flags.
    __ Cmp(argc, 1);
    __ Pop(arg_array, this_arg);  // Overwrites argc.
    __ CmovX(this_arg, undefined_value, lo);   // undefined if argc == 0.
    __ CmovX(arg_array, undefined_value, ls);  // undefined if argc <= 1.

    __ Peek(receiver, 0);
    __ Poke(this_arg, 0);
  }

  // ----------- S t a t e -------------
  //  -- x0      : argArray
  //  -- x1      : receiver
  //  -- x3      : undefined root value
  //  -- jssp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(receiver, &receiver_not_callable);
  __ Ldr(x10, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Ldrb(w10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable,
                             &receiver_not_callable);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ Cmp(arg_array, null_value);
  __ Ccmp(arg_array, undefined_value, ZFlag, ne);
  __ B(eq, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target in x3).
  DCHECK(undefined_value.Is(x3));
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ Bind(&no_arguments);
  {
    __ Mov(x0, 0);
    DCHECK(receiver.Is(x1));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ Bind(&receiver_not_callable);
  {
    __ Poke(receiver, 0);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1801
1802
// static
// Implements Function.prototype.call: ensures at least one argument
// (pushing undefined if needed), takes the callable from the receiver
// slot, shifts all arguments down one slot so the original first argument
// becomes the new receiver, and tail-calls the Call builtin.
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  Register argc = x0;
  Register function = x1;
  Register scratch1 = x10;
  Register scratch2 = x11;

  ASM_LOCATION("Builtins::Generate_FunctionPrototypeCall");

  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ Cbnz(argc, &done);
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Push(scratch1);
    __ Mov(argc, 1);
    __ Bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));

  // 3. Shift arguments and return address one slot down on the stack
  // (overwriting the original receiver). Adjust argument count to make
  // the original first argument the new receiver.
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is jssp.
    __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2));
    __ Sub(scratch1, scratch2, kPointerSize);

    // Copy downward: each iteration moves one slot (scratch1 = source,
    // scratch2 = destination, both post-decremented).
    __ Bind(&loop);
    __ Ldr(x12, MemOperand(scratch1, -kPointerSize, PostIndex));
    __ Str(x12, MemOperand(scratch2, -kPointerSize, PostIndex));
    __ Cmp(scratch1, jssp);
    __ B(ge, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Sub(argc, argc, 1);
    __ Drop(1);
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
1848
1849
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001850void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1851 // ----------- S t a t e -------------
1852 // -- x0 : argc
1853 // -- jssp[0] : argumentsList (if argc == 3)
1854 // -- jssp[8] : thisArgument (if argc >= 2)
1855 // -- jssp[16] : target (if argc >= 1)
1856 // -- jssp[24] : receiver
1857 // -----------------------------------
1858 ASM_LOCATION("Builtins::Generate_ReflectApply");
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001859
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001860 Register argc = x0;
1861 Register arguments_list = x0;
1862 Register target = x1;
1863 Register this_argument = x2;
1864 Register undefined_value = x3;
1865
1866 __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
1867
1868 // 1. Load target into x1 (if present), argumentsList into x0 (if present),
1869 // remove all arguments from the stack (including the receiver), and push
1870 // thisArgument (if present) instead.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001871 {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001872 // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
1873 // consistent state for a simple pop operation.
1874 __ Claim(3);
1875 __ Drop(argc);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001876
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001877 // ----------- S t a t e -------------
1878 // -- x0 : argc
1879 // -- jssp[0] : argumentsList (dummy value if argc <= 2)
1880 // -- jssp[8] : thisArgument (dummy value if argc <= 1)
1881 // -- jssp[16] : target (dummy value if argc == 0)
1882 // -- jssp[24] : receiver
1883 // -----------------------------------
1884 __ Adds(x10, argc, 0); // Preserve argc, and set the Z flag if it is zero.
1885 __ Pop(arguments_list, this_argument, target); // Overwrites argc.
1886 __ CmovX(target, undefined_value, eq); // undefined if argc == 0.
1887 __ Cmp(x10, 2);
1888 __ CmovX(this_argument, undefined_value, lo); // undefined if argc <= 1.
1889 __ CmovX(arguments_list, undefined_value, ls); // undefined if argc <= 2.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001890
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001891 __ Poke(this_argument, 0); // Overwrite receiver.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001892 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001893
1894 // ----------- S t a t e -------------
1895 // -- x0 : argumentsList
1896 // -- x1 : target
1897 // -- jssp[0] : thisArgument
1898 // -----------------------------------
1899
1900 // 2. Make sure the target is actually callable.
1901 Label target_not_callable;
1902 __ JumpIfSmi(target, &target_not_callable);
1903 __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
1904 __ Ldr(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
1905 __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable, &target_not_callable);
1906
1907 // 3a. Apply the target to the given argumentsList (passing undefined for
1908 // new.target in x3).
1909 DCHECK(undefined_value.Is(x3));
1910 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1911
1912 // 3b. The target is not callable, throw an appropriate TypeError.
1913 __ Bind(&target_not_callable);
1914 {
1915 __ Poke(target, 0);
1916 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1917 }
1918}
1919
1920
// Implements Reflect.construct: normalizes (target, argumentsList,
// new.target) from the stack, verifies that both target and new.target
// are constructors, and dispatches to the Apply builtin (which constructs
// because new.target is not undefined).
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0       : argc
  //  -- jssp[0]  : new.target (optional)
  //  -- jssp[8]  : argumentsList
  //  -- jssp[16] : target
  //  -- jssp[24] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ReflectConstruct");

  // Note: argc and arguments_list alias x0 — argc is consumed by the Pop
  // below.
  Register argc = x0;
  Register arguments_list = x0;
  Register target = x1;
  Register new_target = x3;
  Register undefined_value = x4;

  __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);

  // 1. Load target into x1 (if present), argumentsList into x0 (if present),
  // new.target into x3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
    __ Claim(3);
    __ Drop(argc);

    // ----------- S t a t e -------------
    //  -- x0       : argc
    //  -- jssp[0]  : new.target (dummy value if argc <= 2)
    //  -- jssp[8]  : argumentsList (dummy value if argc <= 1)
    //  -- jssp[16] : target (dummy value if argc == 0)
    //  -- jssp[24] : receiver
    // -----------------------------------
    __ Adds(x10, argc, 0);  // Preserve argc, and set the Z flag if it is zero.
    __ Pop(new_target, arguments_list, target);  // Overwrites argc.
    __ CmovX(target, undefined_value, eq);  // undefined if argc == 0.
    __ Cmp(x10, 2);
    __ CmovX(arguments_list, undefined_value, lo);  // undefined if argc <= 1.
    __ CmovX(new_target, target, ls);  // target if argc <= 2.

    __ Poke(undefined_value, 0);  // Overwrite receiver.
  }

  // ----------- S t a t e -------------
  //  -- x0      : argumentsList
  //  -- x1      : target
  //  -- x3      : new.target
  //  -- jssp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(target, &target_not_constructor);
  __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
  __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor,
                             &target_not_constructor);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(new_target, &new_target_not_constructor);
  __ Ldr(x10, FieldMemOperand(new_target, HeapObject::kMapOffset));
  __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor,
                             &new_target_not_constructor);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ Bind(&target_not_constructor);
  {
    __ Poke(target, 0);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ Bind(&new_target_not_constructor);
  {
    __ Poke(new_target, 0);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
2006
2007
2008static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
2009 Label* stack_overflow) {
2010 // ----------- S t a t e -------------
2011 // -- x0 : actual number of arguments
2012 // -- x1 : function (passed through to callee)
2013 // -- x2 : expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002014 // -- x3 : new target (passed through to callee)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002015 // -----------------------------------
2016 // Check the stack for overflow.
2017 // We are not trying to catch interruptions (e.g. debug break and
2018 // preemption) here, so the "real stack limit" is checked.
2019 Label enough_stack_space;
2020 __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
2021 // Make x10 the space we have left. The stack might already be overflowed
2022 // here which will cause x10 to become negative.
2023 __ Sub(x10, jssp, x10);
2024 // Check if the arguments will overflow the stack.
2025 __ Cmp(x10, Operand(x2, LSL, kPointerSizeLog2));
2026 __ B(le, stack_overflow);
2027}
2028
2029
// Builds an arguments adaptor frame: pushes lr/fp, the ARGUMENTS_ADAPTOR
// frame-type marker, the function (x1), and the smi-tagged argument count
// (from x0), then points fp at the new frame. Clobbers x10 and x11.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(x10, x0);
  __ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ Push(lr, fp);
  __ Push(x11, x1, x10);
  __ Add(fp, jssp,
         StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
}
2038
2039
// Tears down an arguments adaptor frame built by EnterArgumentsAdaptorFrame:
// restores fp/lr and drops the pushed parameters plus the receiver, using
// the smi-tagged argument count stored in the frame. x0 (the result) is
// preserved.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then drop the parameters and the receiver.
  __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                               kPointerSize)));
  __ Mov(jssp, fp);
  __ Pop(fp, lr);
  // Drop the arguments (count is smi-tagged in x10), then the receiver.
  __ DropBySMI(x10, kXRegSize);
  __ Drop(1);
}
2053
2054
// static
// Materializes argumentsList into a FixedArray (fast paths for unmodified
// arguments objects and fast JSArrays, runtime fallback otherwise), checks
// for stack overflow, pushes the elements as stack arguments, then
// dispatches to Call or Construct depending on new.target.
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0      : argumentsList
  //  -- x1      : target
  //  -- x3      : new.target (checked to be constructor or undefined)
  //  -- jssp[0] : thisArgument
  // -----------------------------------

  Register arguments_list = x0;
  Register target = x1;
  Register new_target = x3;

  // Note: args aliases x0 and len aliases x2 (also used below as the
  // arguments_list_map scratch before len is written).
  Register args = x0;
  Register len = x2;

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(arguments_list, &create_runtime);

    // Load native context.
    Register native_context = x4;
    __ Ldr(native_context, NativeContextMemOperand());

    // Load the map of argumentsList.
    Register arguments_list_map = x2;
    __ Ldr(arguments_list_map,
           FieldMemOperand(arguments_list, HeapObject::kMapOffset));

    // Check if argumentsList is an (unmodified) arguments object.
    __ Ldr(x10, ContextMemOperand(native_context,
                                  Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ Ldr(x11, ContextMemOperand(native_context,
                                  Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ Cmp(arguments_list_map, x10);
    __ Ccmp(arguments_list_map, x11, ZFlag, ne);
    __ B(eq, &create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(arguments_list_map, native_context, JS_ARRAY_TYPE);
    __ B(eq, &create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ Bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(target, new_target, arguments_list);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(new_target, target);
      __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list,
                                            FixedArray::kLengthOffset));
    }
    __ B(&done_create);

    // Try to create the list from an arguments object.
    __ Bind(&create_arguments);
    __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list,
                                          JSArgumentsObject::kLengthOffset));
    __ Ldr(x10, FieldMemOperand(arguments_list, JSObject::kElementsOffset));
    __ Ldrsw(x11, UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
    // If the arguments object's length disagrees with its elements' length
    // it has been modified; fall back to the runtime.
    __ CompareAndBranch(len, x11, ne, &create_runtime);
    __ Mov(args, x10);
    __ B(&done_create);

    // Try to create the list from a JSArray object.
    __ Bind(&create_array);
    __ Ldr(x10, FieldMemOperand(arguments_list_map, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(x10);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    // Branch for anything that's not FAST_{SMI_}ELEMENTS.
    __ TestAndBranchIfAnySet(x10, ~FAST_ELEMENTS, &create_runtime);
    __ Ldrsw(len,
             UntagSmiFieldMemOperand(arguments_list, JSArray::kLengthOffset));
    __ Ldr(args, FieldMemOperand(arguments_list, JSArray::kElementsOffset));

    __ Bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
    // Make x10 the space we have left. The stack might already be overflowed
    // here which will cause x10 to become negative.
    __ Sub(x10, masm->StackPointer(), x10);
    // Check if the arguments will overflow the stack.
    __ Cmp(x10, Operand(len, LSL, kPointerSizeLog2));
    __ B(gt, &done);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ Bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- x0      : args (a FixedArray built from argumentsList)
  //  -- x1      : target
  //  -- x2      : len (number of elements to push from args)
  //  -- x3      : new.target (checked to be constructor or undefined)
  //  -- jssp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label done, loop;
    Register src = x4;

    __ Add(src, args, FixedArray::kHeaderSize - kHeapObjectTag);
    __ Mov(x0, len);  // The 'len' argument for Call() or Construct().
    __ Cbz(len, &done);
    __ Claim(len);
    __ Bind(&loop);
    __ Sub(len, len, 1);
    __ Ldr(x10, MemOperand(src, kPointerSize, PostIndex));
    __ Poke(x10, Operand(len, LSL, kPointerSizeLog2));
    __ Cbnz(len, &loop);
    __ Bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- x0              : argument count (len)
  //  -- x1              : target
  //  -- x3              : new.target (checked to be constructor or undefined)
  //  -- jssp[0]         : args[len-1]
  //  -- jssp[8]         : args[len-2]
  //  ...                : ...
  //  -- jssp[8*(len-2)] : args[1]
  //  -- jssp[8*(len-1)] : args[0]
  // -----------------------------------

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(new_target, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
2194
namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ Mov(scratch1, Operand(is_tail_call_elimination_enabled));
  __ Ldrb(scratch1, MemOperand(scratch1));
  __ Cmp(scratch1, Operand(0));
  __ B(eq, &done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ Ldr(scratch3,
           MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ Cmp(scratch3, Operand(Smi::FromInt(StackFrame::STUB)));
    __ B(ne, &no_interpreter_frame);
    __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ Ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ Ldr(scratch3,
         MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Cmp(scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ B(ne, &no_arguments_adaptor);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mov(fp, scratch2);
  __ Ldr(caller_args_count_reg,
         MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ B(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count from its SharedFunctionInfo.
  __ Ldr(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(scratch1,
         FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldrsw(caller_args_count_reg,
           FieldMemOperand(scratch1,
                           SharedFunctionInfo::kFormalParameterCountOffset));
  __ bind(&formal_parameter_count_loaded);

  // Delegate the actual frame dropping / argument shuffling to the
  // MacroAssembler helper of the same name.
  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002283
// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  ASM_LOCATION("Builtins::Generate_CallFunction");
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(x1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that function is not a "classConstructor": class constructors may
  // only be invoked via [[Construct]], so a plain call raises a TypeError
  // (see the class_constructor block at the bottom).
  Label class_constructor;
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
  __ TestAndBranchIfAnySet(
      w3, (1 << SharedFunctionInfo::kIsDefaultConstructor) |
              (1 << SharedFunctionInfo::kIsSubclassConstructor) |
              (1 << SharedFunctionInfo::kIsBaseConstructor),
      &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  // Native and strict-mode functions take the receiver as-is.
  Label done_convert;
  __ TestAndBranchIfAnySet(w3,
                           (1 << SharedFunctionInfo::kNative) |
                               (1 << SharedFunctionInfo::kStrictModeFunction),
                           &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- x0 : the number of arguments (not including the receiver)
    //  -- x1 : the function to call (checked to be a JSFunction)
    //  -- x2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // The receiver is statically known to be null/undefined:
      // patch receiver to global proxy.
      __ LoadGlobalProxy(x3);
    } else {
      Label convert_to_object, convert_receiver;
      // Load the receiver, which sits x0 slots above the stack pointer
      // (just past the pushed arguments).
      __ Peek(x3, Operand(x0, LSL, kXRegSizeLog2));
      __ JumpIfSmi(x3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(x3, x4, x4, FIRST_JS_RECEIVER_TYPE);
      // JSReceivers need no conversion.
      __ B(hs, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(x3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(x3, Heap::kNullValueRootIndex, &convert_to_object);
        __ Bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(x3);
        }
        __ B(&convert_receiver);
      }
      __ Bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        // Preserve the argument count (Smi-tagged) and the function across
        // the stub call; restore and untag afterwards.
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(x0);
        __ Push(x0, x1);
        __ Mov(x0, x3);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ Mov(x3, x0);
        __ Pop(x1, x0);
        __ SmiUntag(x0);
      }
      // Reload the shared function info; x2 may have been clobbered by the
      // stub call above.
      __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
      __ Bind(&convert_receiver);
    }
    // Write the converted receiver back into its stack slot.
    __ Poke(x3, Operand(x0, LSL, kXRegSizeLog2));
  }
  __ Bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSFunction)
  //  -- x2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, x0, x3, x4, x5);
  }

  // Load the formal parameter count as the expected argument count and
  // dispatch to the function's code via InvokeFunctionCode.
  __ Ldrsw(
      x2, FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(x0);
  ParameterCount expected(x2);
  __ InvokeFunctionCode(x1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(x1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
2394
2395
2396namespace {
2397
// Pushes the [[BoundArguments]] of the JSBoundFunction in x1 onto the stack,
// between the receiver and the already-pushed call arguments, and increases
// the argument count in x0 by the number of bound arguments.
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : target (checked to be a JSBoundFunction)
  //  -- x3 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into x2 and length of that into x4.
  Label no_bound_arguments;
  __ Ldr(x2, FieldMemOperand(x1, JSBoundFunction::kBoundArgumentsOffset));
  __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
  __ Cmp(x4, 0);
  __ B(eq, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- x0 : the number of arguments (not including the receiver)
    //  -- x1 : target (checked to be a JSBoundFunction)
    //  -- x2 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- x3 : new.target (only in case of [[Construct]])
    //  -- x4 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ Claim(x4);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(jssp, Heap::kRealStackLimitRootIndex);
      __ B(gt, &done);  // Signed comparison.
      // Restore the stack pointer.
      __ Drop(x4);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ Bind(&done);
    }

    // Relocate arguments down the stack.
    // After Claim(x4) the old slots sit x4 positions above the new ones;
    // copy slot i from old position i + x4 for i = 0 .. x0 (inclusive, so
    // the receiver moves too).
    {
      Label loop, done_loop;
      __ Mov(x5, 0);
      __ Bind(&loop);
      __ Cmp(x5, x0);
      __ B(gt, &done_loop);
      __ Peek(x10, Operand(x4, LSL, kPointerSizeLog2));
      __ Poke(x10, Operand(x5, LSL, kPointerSizeLog2));
      __ Add(x4, x4, 1);
      __ Add(x5, x5, 1);
      __ B(&loop);
      __ Bind(&done_loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    // Walk the FixedArray from the last element down, storing each one at
    // slot x0 and bumping the argument count as we go.
    {
      Label loop;
      __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
      __ Add(x2, x2, FixedArray::kHeaderSize - kHeapObjectTag);
      __ Bind(&loop);
      __ Sub(x4, x4, 1);
      __ Ldr(x10, MemOperand(x2, x4, LSL, kPointerSizeLog2));
      __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));
      __ Add(x0, x0, 1);
      __ Cmp(x4, 0);
      __ B(gt, &loop);
    }
  }
  __ Bind(&no_bound_arguments);
}
2470
2471} // namespace
2472
2473
// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(x1);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, x0, x3, x4, x5);
  }

  // Patch the receiver to [[BoundThis]]: overwrite the stack slot x0
  // positions above jssp with the bound receiver.
  __ Ldr(x10, FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset));
  __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));

  // Push the [[BoundArguments]] onto the stack (also adjusts x0).
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin: load the builtin's
  // Code object through its external reference and jump past the Code header
  // to the first instruction.
  __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Mov(x10,
         ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()));
  __ Ldr(x11, MemOperand(x10));
  __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x12);
}
2502
2503
// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // Top-level [[Call]] dispatch: routes to the JSFunction/JSBoundFunction
  // builtins, the proxy runtime fallback, or the call-as-function delegate.
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(x1, &non_callable);
  __ Bind(&non_smi);
  // After this, x4 holds the target's map and x5 its instance type.
  __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ Ldrb(x4, FieldMemOperand(x4, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x4, 1 << Map::kIsCallable, &non_callable);

  __ Cmp(x5, JS_PROXY_TYPE);
  __ B(ne, &non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, x0, x3, x4, x5);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(x1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ Add(x0, x0, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ Bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, x1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
2562
2563
// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (checked to be a JSFunction)
  //  -- x3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(x1);

  // Calling convention for function specific ConstructStubs require
  // x2 to contain either an AllocationSite or undefined.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point). Load the stub's Code object from the shared
  // function info and jump past the Code header to its first instruction.
  __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
  __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x4);
}
2584
2585
// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSBoundFunction)
  //  -- x3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(x1);

  // Push the [[BoundArguments]] onto the stack (also adjusts x0).
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ Cmp(x1, x3);
    __ B(ne, &done);
    __ Ldr(x3,
           FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ Bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin: load the
  // builtin's Code object through its external reference and jump past the
  // Code header to the first instruction.
  __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Mov(x10, ExternalReference(Builtins::kConstruct, masm->isolate()));
  __ Ldr(x11, MemOperand(x10));
  __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x12);
}
2615
2616
// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (checked to be a JSProxy)
  //  -- x3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]]: pass the proxy and
  // new.target as extra stack arguments.
  __ Push(x1);
  __ Push(x3);
  // Include the pushed new_target, constructor and the receiver.
  __ Add(x0, x0, 3);
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
2635
2636
// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // Top-level [[Construct]] dispatch: routes to the JSFunction /
  // JSBoundFunction / JSProxy construct paths or the constructor delegate.
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (can be any Object)
  //  -- x3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(x1, &non_constructor);

  // Dispatch based on instance type.
  // After this, x4 holds the target's map and x5 its instance type.
  __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  __ Ldrb(x2, FieldMemOperand(x4, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x2, 1 << Map::kIsConstructor, &non_constructor);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ Cmp(x5, JS_PROXY_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, x1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
2686
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002687// static
Ben Murdochc5610432016-08-08 18:44:38 +01002688void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2689 ASM_LOCATION("Builtins::Generate_AllocateInNewSpace");
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002690 // ----------- S t a t e -------------
Ben Murdochc5610432016-08-08 18:44:38 +01002691 // -- x1 : requested object size (untagged)
2692 // -- lr : return address
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002693 // -----------------------------------
Ben Murdochc5610432016-08-08 18:44:38 +01002694 __ SmiTag(x1);
2695 __ Push(x1);
2696 __ Move(cp, Smi::FromInt(0));
2697 __ TailCallRuntime(Runtime::kAllocateInNewSpace);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002698}
2699
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002700// static
Ben Murdochc5610432016-08-08 18:44:38 +01002701void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
2702 ASM_LOCATION("Builtins::Generate_AllocateInOldSpace");
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002703 // ----------- S t a t e -------------
Ben Murdochc5610432016-08-08 18:44:38 +01002704 // -- x1 : requested object size (untagged)
2705 // -- lr : return address
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002706 // -----------------------------------
Ben Murdochc5610432016-08-08 18:44:38 +01002707 __ SmiTag(x1);
2708 __ Move(x2, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
2709 __ Push(x1, x2);
2710 __ Move(cp, Smi::FromInt(0));
2711 __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002712}
2713
// Adapts the actual argument count to the callee's expected count by either
// dropping extra arguments or padding with undefined, then invokes the
// callee's code entry inside an arguments adaptor frame.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
  // ----------- S t a t e -------------
  //  -- x0 : actual number of arguments
  //  -- x1 : function (passed through to callee)
  //  -- x2 : expected number of arguments
  //  -- x3 : new target (passed through to callee)
  // -----------------------------------

  Register argc_actual = x0;  // Excluding the receiver.
  Register argc_expected = x2;  // Excluding the receiver.
  Register function = x1;
  Register code_entry = x10;

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Cmp(argc_actual, argc_expected);
  __ B(lt, &too_few);
  // The sentinel value means the callee accepts any argument count; skip
  // adaptation entirely.
  __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
  __ B(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    // Copy the first `expected` arguments (plus the receiver) from the
    // caller's frame into a fresh adaptor frame; surplus arguments are
    // simply not copied.
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    Register copy_start = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    // scratch2 = expected argument count in bytes.
    __ Lsl(scratch2, argc_expected, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_start, fp, 3 * kPointerSize);
    __ Add(copy_start, copy_start, Operand(argc_actual, LSL, kPointerSizeLog2));
    __ Sub(copy_end, copy_start, scratch2);
    __ Sub(copy_end, copy_end, kPointerSize);
    __ Mov(copy_to, jssp);

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, scratch2, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame,
    // two slots per iteration (the extra claimed slot absorbs the possible
    // over-copy when the count is even).
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_start, - 2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_start, copy_end);
    __ B(hi, &copy_2_by_2);

    // Correct the space allocated for the extra slot.
    __ Drop(1);

    __ B(&invoke);
  }

  {  // Too few parameters: Actual < expected
    // Copy all actual arguments (plus the receiver) into the adaptor frame,
    // then fill the remaining expected slots with undefined.
    __ Bind(&too_few);

    Register copy_from = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // scratch2 / argc_actual = expected / actual argument count in bytes.
    __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
    __ Lsl(argc_actual, argc_actual, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_from, fp, 3 * kPointerSize);
    __ Add(copy_from, copy_from, argc_actual);
    __ Mov(copy_to, jssp);
    __ Sub(copy_end, copy_to, 1 * kPointerSize);   // Adjust for the receiver.
    __ Sub(copy_end, copy_end, argc_actual);

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, scratch2, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame,
    // two slots per iteration.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_from, - 2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &copy_2_by_2);

    __ Mov(copy_to, copy_end);

    // Fill the remaining expected arguments with undefined.
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Add(copy_end, jssp, kPointerSize);

    Label fill;
    __ Bind(&fill);
    __ Stp(scratch1, scratch1,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &fill);

    // Correct the space allocated for the extra slot.
    __ Drop(1);
  }

  // Arguments have been adapted. Now call the entry point.
  __ Bind(&invoke);
  // The callee sees the expected count as its actual count from here on.
  __ Mov(argc_actual, argc_expected);
  // x0 : expected number of arguments
  // x1 : function (passed through to callee)
  // x3 : new target (passed through to callee)
  __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  __ Call(code_entry);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // Call the entry point without adapting the arguments.
  __ Bind(&dont_adapt_arguments);
  __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  __ Jump(code_entry);

  __ Bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ Unreachable();
  }
}
2858
2859
2860#undef __
2861
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002862} // namespace internal
2863} // namespace v8
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002864
#endif  // V8_TARGET_ARCH_ARM64