// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#if V8_TARGET_ARCH_ARM64

#include "src/arm64/frames-arm64.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

14namespace v8 {
15namespace internal {
16
17
18#define __ ACCESS_MASM(masm)
19
20
// Load the built-in Array function from the current context into |result|.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  // (Comment fixed: previous text said "InternalArray", but the slot read
  // here is ARRAY_FUNCTION_INDEX.)
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}
26
27
// Load the built-in InternalArray function from the current context into
// |result|.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}
34
35
// Adaptor that pushes the optional extra arguments (target and/or new target)
// required by a C++ builtin and then tail-calls into the C++ runtime entry.
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments excluding receiver
  //  -- x1                 : target
  //  -- x3                 : new target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument (kPointerSize == 8 on arm64;
  //                          previous comment said 4, a 32-bit leftover)
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(x1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // Insert extra arguments. The target/new-target, when requested, become the
  // leading stack arguments of the C++ builtin.
  int num_extra_args = 0;
  switch (extra_args) {
    case BuiltinExtraArguments::kTarget:
      __ Push(x1);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kNewTarget:
      __ Push(x3);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kTargetAndNewTarget:
      __ Push(x1, x3);
      num_extra_args += 2;
      break;
    case BuiltinExtraArguments::kNone:
      break;
  }

  // JumpToExternalReference expects x0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Add(x0, x0, num_extra_args + 1);

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
81
82
83void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
84 // ----------- S t a t e -------------
85 // -- x0 : number of arguments
86 // -- lr : return address
87 // -- sp[...]: constructor arguments
88 // -----------------------------------
89 ASM_LOCATION("Builtins::Generate_InternalArrayCode");
90 Label generic_array_code;
91
92 // Get the InternalArray function.
93 GenerateLoadInternalArrayFunction(masm, x1);
94
95 if (FLAG_debug_code) {
96 // Initial map for the builtin InternalArray functions should be maps.
97 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
98 __ Tst(x10, kSmiTagMask);
99 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
100 __ CompareObjectType(x10, x11, x12, MAP_TYPE);
101 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
102 }
103
104 // Run the native code for the InternalArray function called as a normal
105 // function.
106 InternalArrayConstructorStub stub(masm->isolate());
107 __ TailCallStub(&stub);
108}
109
110
111void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
112 // ----------- S t a t e -------------
113 // -- x0 : number of arguments
114 // -- lr : return address
115 // -- sp[...]: constructor arguments
116 // -----------------------------------
117 ASM_LOCATION("Builtins::Generate_ArrayCode");
118 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
119
120 // Get the Array function.
121 GenerateLoadArrayFunction(masm, x1);
122
123 if (FLAG_debug_code) {
124 // Initial map for the builtin Array functions should be maps.
125 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
126 __ Tst(x10, kSmiTagMask);
127 __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
128 __ CompareObjectType(x10, x11, x12, MAP_TYPE);
129 __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
130 }
131
132 // Run the native code for the Array function called as a normal function.
133 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000134 __ Mov(x3, x1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000135 ArrayConstructorStub stub(masm->isolate());
136 __ TailCallStub(&stub);
137}
138
139
// static
// Math.max / Math.min builtin: folds all stack arguments into a single
// accumulator, converting non-Number arguments via ToNumberStub on the fly.
// Accumulator lives as a tagged value in x1 and its double value in d1.
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments
  //  -- lr                 : return address
  //  -- sp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- sp[(argc + 1) * 8] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_MathMaxMin");

  // The identity element: max starts from -Infinity, min from +Infinity.
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in x1 and the double value in d1.
  __ LoadRoot(x1, root_index);
  __ Ldr(d1, FieldMemOperand(x1, HeapNumber::kValueOffset));

  // Remember how many slots to drop (including the receiver).
  __ Add(x4, x0, 1);

  Label done_loop, loop;
  __ Bind(&loop);
  {
    // Check if all parameters done. x0 counts down and doubles as the index
    // of the next parameter to visit.
    __ Subs(x0, x0, 1);
    __ B(lt, &done_loop);

    // Load the next parameter tagged value into x2.
    __ Peek(x2, Operand(x0, LSL, kPointerSizeLog2));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumberStub if necessary.
    Label convert_smi, convert_number, done_convert;
    __ JumpIfSmi(x2, &convert_smi);
    __ JumpIfHeapNumber(x2, &convert_number);
    {
      // Parameter is not a Number, use the ToNumberStub to convert it.
      // Smi-tag and save the loop counter (x0), the tagged accumulator (x1)
      // and the drop count (x4) across the call.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(x0);
      __ SmiTag(x4);
      __ Push(x0, x1, x4);
      __ Mov(x0, x2);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Mov(x2, x0);
      __ Pop(x4, x1, x0);
      {
        // Restore the double accumulator value (d1); the call may have
        // triggered GC, so re-derive it from the tagged value in x1.
        Label done_restore;
        __ SmiUntagToDouble(d1, x1, kSpeculativeUntag);
        __ JumpIfSmi(x1, &done_restore);
        __ Ldr(d1, FieldMemOperand(x1, HeapNumber::kValueOffset));
        __ Bind(&done_restore);
      }
      __ SmiUntag(x4);
      __ SmiUntag(x0);
    }
    __ AssertNumber(x2);
    // ToNumber may have produced a smi; route it through the smi path.
    __ JumpIfSmi(x2, &convert_smi);

    __ Bind(&convert_number);
    __ Ldr(d2, FieldMemOperand(x2, HeapNumber::kValueOffset));
    __ B(&done_convert);

    __ Bind(&convert_smi);
    __ SmiUntagToDouble(d2, x2);
    __ Bind(&done_convert);

    // We can use a single fmin/fmax for the operation itself, but we then need
    // to work out which HeapNumber (or smi) the result came from.
    __ Fmov(x11, d1);
    if (kind == MathMaxMinKind::kMin) {
      __ Fmin(d1, d1, d2);
    } else {
      DCHECK(kind == MathMaxMinKind::kMax);
      __ Fmax(d1, d1, d2);
    }
    __ Fmov(x10, d1);
    // Bit-compare the result against the old accumulator: if the raw bits are
    // unchanged, keep the old tagged value (x1), otherwise take the new
    // parameter's tagged value (x2).
    __ Cmp(x10, x11);
    __ Csel(x1, x1, x2, eq);
    __ B(&loop);
  }

  __ Bind(&done_loop);
  // Return the tagged accumulator and drop all arguments plus the receiver.
  __ Mov(x0, x1);
  __ Drop(x4);
  __ Ret();
}
230
// static
// Number(value) called as a plain function: returns ToNumber(value), or +0
// when called with no arguments.
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_NumberConstructor");

  // 1. Load the first argument into x0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);  // Drop all arguments except the first.
    // Post-indexed load: reads the first argument and pops both it and the
    // receiver (2 slots) in one instruction.
    __ Ldr(x0, MemOperand(jssp, 2 * kPointerSize, PostIndex));
  }

  // 2a. Convert first argument to number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0 (already in x0, since argc was 0).
  __ Bind(&no_arguments);
  __ Drop(1);  // Pop the receiver.
  __ Ret();
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000261
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000262
// static
// new Number(value): wraps ToNumber(value) in a JSValue. Falls back to the
// runtime when new.target differs from the constructor (subclassing) or when
// inline allocation fails.
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- x3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_NumberConstructor_ConstructStub");

  // 1. Make sure we operate in the context of the called function.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // 2. Load the first argument into x2 and get rid of the rest (including the
  // receiver). Defaults to smi 0 when no argument is given.
  {
    Label no_arguments, done;
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    // Pops the first argument and the receiver in one post-indexed load.
    __ Ldr(x2, MemOperand(jssp, 2 * kPointerSize, PostIndex));
    __ B(&done);
    __ Bind(&no_arguments);
    __ Drop(1);
    __ Mov(x2, Smi::FromInt(0));
    __ Bind(&done);
  }

  // 3. Make sure x2 is a number; otherwise convert via ToNumberStub,
  // preserving constructor (x1) and new target (x3) across the call.
  {
    Label done_convert;
    __ JumpIfSmi(x2, &done_convert);
    __ JumpIfObjectType(x2, x4, x4, HEAP_NUMBER_TYPE, &done_convert, eq);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(x1, x3);
      __ Move(x0, x2);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Move(x2, x0);
      __ Pop(x3, x1);
    }
    __ Bind(&done_convert);
  }

  // 4. Check if new target and constructor differ (i.e. subclass construct).
  Label new_object;
  __ Cmp(x1, x3);
  __ B(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object; the converted number
  // (x2) is preserved across the call and stored into the fresh wrapper.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x2);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(x2);
  }
  __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
  __ Ret();
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000331
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000332
// static
// String(value) called as a plain function: returns value if it is already a
// string, a descriptive string for symbols, ToString(value) otherwise, and
// the empty string when called without arguments.
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructor");

  // 1. Load the first argument into x0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    // Pops the first argument and the receiver in one post-indexed load.
    __ Ldr(x0, MemOperand(jssp, 2 * kPointerSize, PostIndex));
  }

  // 2a. At least one argument, return x0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(x0, &to_string);
    // Relies on SYMBOL_TYPE being the first non-string instance type, so a
    // single compare distinguishes string (< / lo), symbol (==) and other
    // (hi) cases.
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(x0, x1, x1, FIRST_NONSTRING_TYPE);
    __ B(hi, &to_string);
    __ B(eq, &symbol_descriptive_string);
    __ Ret();  // Already a string: return it unchanged.
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ Bind(&no_arguments);
  {
    __ LoadRoot(x0, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ Ret();
  }

  // 3a. Convert x0 to a string.
  __ Bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in x0 to a string.
  __ Bind(&symbol_descriptive_string);
  {
    __ Push(x0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}
388
389
// static
// new String(value): wraps ToString(value) in a JSValue. Falls back to the
// runtime when new.target differs from the constructor (subclassing) or when
// inline allocation fails.
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- x3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructor_ConstructStub");

  // 1. Make sure we operate in the context of the called function.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // 2. Load the first argument into x2 and get rid of the rest (including the
  // receiver). Defaults to the empty string when no argument is given.
  {
    Label no_arguments, done;
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    // Pops the first argument and the receiver in one post-indexed load.
    __ Ldr(x2, MemOperand(jssp, 2 * kPointerSize, PostIndex));
    __ B(&done);
    __ Bind(&no_arguments);
    __ Drop(1);
    __ LoadRoot(x2, Heap::kempty_stringRootIndex);
    __ Bind(&done);
  }

  // 3. Make sure x2 is a string; otherwise convert via ToStringStub,
  // preserving constructor (x1) and new target (x3) across the call.
  {
    Label convert, done_convert;
    __ JumpIfSmi(x2, &convert);
    __ JumpIfObjectType(x2, x4, x4, FIRST_NONSTRING_TYPE, &done_convert, lo);
    __ Bind(&convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(x1, x3);
      __ Move(x0, x2);
      __ CallStub(&stub);
      __ Move(x2, x0);
      __ Pop(x3, x1);
    }
    __ Bind(&done_convert);
  }

  // 4. Check if new target and constructor differ (i.e. subclass construct).
  Label new_object;
  __ Cmp(x1, x3);
  __ B(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object; the converted string
  // (x2) is preserved across the call and stored into the fresh wrapper.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x2);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(x2);
  }
  __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
  __ Ret();
}
459
// Tail-call into the code object attached to the SharedFunctionInfo of the
// function in x1. Clobbers x2.
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
  // Skip the Code object header to reach the first instruction.
  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}
466
// Call |function_id| in the runtime (passing the target function as its sole
// argument) and tail-call into the Code object it returns, with the original
// x0/x1/x3 restored for the callee.
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- x0 : argument count (preserved for callee)
  //  -- x1 : target function (preserved for callee)
  //  -- x3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target.
    // Push another copy as a parameter to the runtime call.
    __ SmiTag(x0);  // Argument count must be a smi while on the stack.
    __ Push(x0, x1, x3, x1);

    __ CallRuntime(function_id, 1);
    __ Move(x2, x0);  // Returned Code object.

    // Restore target function and new target.
    __ Pop(x3, x1, x0);
    __ SmiUntag(x0);
  }

  // Skip the Code object header and jump to the first instruction.
  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}
492
493
// Entry point for a function whose optimized code is being produced in the
// background: opportunistically try to install it, else run the unoptimized
// shared code.
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However, not
  // checking may delay installing ready functions, and always checking would be
  // quite expensive. A good compromise is to first check against stack limit as
  // a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
  __ B(hs, &ok);

  // Below the limit: take the slow path, which also tries the install.
  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ Bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
509
510
// Shared body for the JS construct stubs. Builds a CONSTRUCT frame, optionally
// allocates the implicit receiver, copies the arguments, invokes the
// constructor, and implements the ECMA receiver-vs-result selection.
//   is_api_function          - call through HandleApiCallConstruct instead of
//                              InvokeFunction.
//   create_implicit_receiver - allocate a receiver object (ordinary `new`);
//                              otherwise the hole is pushed in its place.
//   check_derived_construct  - throw if a derived constructor returned a
//                              non-object (ES6 9.2.2 step 13+).
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- x1     : constructor function
  //  -- x2     : allocation site or undefined
  //  -- x3     : new target
  //  -- lr     : return address
  //  -- cp     : context pointer
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the four incoming parameters on the stack.
    Register argc = x0;
    Register constructor = x1;
    Register allocation_site = x2;
    Register new_target = x3;

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(allocation_site, x10);
    __ Push(cp);
    __ SmiTag(argc);
    __ Push(allocation_site, argc);

    if (create_implicit_receiver) {
      // Allocate the new receiver object. Constructor and new target are
      // saved on the stack across the stub call and restored afterwards.
      __ Push(constructor, new_target);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Mov(x4, x0);
      __ Pop(new_target, constructor);

      // ----------- S t a t e -------------
      //  -- x1: constructor function
      //  -- x3: new target
      //  -- x4: newly allocated object
      // -----------------------------------

      // Reload the number of arguments from the stack.
      // Set it up in x0 for the function call below.
      // jssp[0]: number of arguments (smi-tagged)
      __ Peek(argc, 0);  // Load number of arguments.
    }

    __ SmiUntag(argc);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(x4, x4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);

    // Copy arguments and receiver to the expression stack.
    // Copy 2 values every loop to use ldp/stp.
    // x0: number of arguments
    // x1: constructor function
    // x2: address of last argument (caller sp)
    // x3: new target
    // jssp[0]: receiver
    // jssp[1]: receiver
    // jssp[2]: number of arguments (smi-tagged)
    // Compute the start address of the copy in x4 (one past the last slot).
    __ Add(x4, x2, Operand(argc, LSL, kPointerSizeLog2));
    Label loop, entry, done_copying_arguments;
    __ B(&entry);
    __ Bind(&loop);
    __ Ldp(x10, x11, MemOperand(x4, -2 * kPointerSize, PreIndex));
    __ Push(x11, x10);
    __ Bind(&entry);
    __ Cmp(x4, x2);
    __ B(gt, &loop);
    // Because we copied values 2 by 2 we may have copied one extra value.
    // Drop it if that is the case.
    __ B(eq, &done_copying_arguments);
    __ Drop(1);
    __ Bind(&done_copying_arguments);

    // Call the function.
    // x0: number of arguments
    // x1: constructor function
    // x3: new target
    if (is_api_function) {
      __ Ldr(cp, FieldMemOperand(constructor, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(argc);
      __ InvokeFunction(constructor, new_target, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore the context from the frame.
    // x0: result
    // jssp[0]: receiver
    // jssp[1]: number of arguments (smi-tagged)
    __ Ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // x0: result
      // jssp[0]: receiver (newly allocated object)
      // jssp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(x0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ JumpIfObjectType(x0, x1, x3, FIRST_JS_RECEIVER_TYPE, &exit, ge);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ Bind(&use_receiver);
      __ Peek(x0, 0);

      // Remove the receiver from the stack, remove caller arguments, and
      // return.
      __ Bind(&exit);
      // x0: result
      // jssp[0]: receiver (newly allocated object)
      // jssp[1]: number of arguments (smi-tagged)
      __ Peek(x1, 1 * kXRegSize);
    } else {
      __ Peek(x1, 0);
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(x0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ Bind(&dont_throw);
  }

  // x1 holds the smi-tagged argument count; drop the arguments plus the
  // receiver and return the result in x0.
  __ DropBySMI(x1);
  __ Drop(1);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2);
  }
  __ Ret();
}
685
686
// Ordinary `new` on a user-defined function: allocates an implicit receiver.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}
690
691
// Construct stub for API (C++ callback) functions: no implicit receiver is
// allocated here; the call goes through HandleApiCallConstruct.
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}
695
696
// Construct stub for builtins: no implicit receiver, no derived-construct
// result check.
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}
700
701
// Construct stub for derived-class constructors: no implicit receiver, and
// the result is checked to be an object (ES6 9.2.2 step 13+).
void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}
706
707
// Trampoline reached when something that is not a constructor was used with
// `new`: throws a TypeError via the runtime (does not return).
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(x1);  // The non-constructable target, for the error message.
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
713
714
715enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
716
717
// Throws a stack-overflow error if pushing |argc| pointer-sized arguments
// would run past the real stack limit; otherwise falls through.
// Clobbers x10, x15; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow.
  // We are not trying to catch interruptions (e.g. debug break and
  // preemption) here, so the "real stack limit" is checked.
  Label enough_stack_space;
  __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
  // Make x10 the space we have left. The stack might already be overflowed
  // here which will cause x10 to become negative.
  // TODO(jbramley): Check that the stack usage here is safe.
  __ Sub(x10, jssp, x10);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ B(gt, &enough_stack_space);
  __ CallRuntime(Runtime::kThrowStackOverflow);
  // We should never return from the APPLY_OVERFLOW builtin.
  if (__ emit_debug_code()) {
    __ Unreachable();
  }

  __ Bind(&enough_stack_space);
}
746
747
// Bridges from the C++ JSEntryStub into JS: sets up the context and root
// register, copies the C-side argument array onto the JS stack, clears the
// JS callee-saved registers, and calls either the Call or Construct builtin.
// Input:
//   x0: new.target.
//   x1: function.
//   x2: receiver.
//   x3: argc.
//   x4: argv.
// Output:
//   x0: result.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody().
  Register new_target = x0;
  Register function = x1;
  Register receiver = x2;
  Register argc = x3;
  Register argv = x4;
  Register scratch = x10;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  {
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    __ Mov(scratch, Operand(ExternalReference(Isolate::kContextAddress,
                                              masm->isolate())));
    __ Ldr(cp, MemOperand(scratch));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(function, receiver);

    // Check if we have enough stack space to push all arguments.
    // Expects argument count in x3 (argc); clobbers x10 and x15.
    // (Comment fixed: previous text named x86 registers eax/ecx/edx/edi,
    // left over from a copy-paste.)
    Generate_CheckStackOverflow(masm, argc, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop, in reverse order.
    // x3: argc.
    // x4: argv.
    Label loop, entry;
    // Compute the copy end address.
    __ Add(scratch, argv, Operand(argc, LSL, kPointerSizeLog2));

    __ B(&entry);
    __ Bind(&loop);
    __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
    __ Ldr(x12, MemOperand(x11));  // Dereference the handle.
    __ Push(x12);  // Push the argument.
    __ Bind(&entry);
    __ Cmp(scratch, argv);
    __ B(ne, &loop);

    // Swap argc and new.target so they match the Call/Construct builtin
    // calling convention (argc in x0, new.target in x3).
    __ Mov(scratch, argc);
    __ Mov(argc, new_target);
    __ Mov(new_target, scratch);
    // x0: argc.
    // x3: new.target.

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    // The original values have been saved in JSEntryStub::GenerateBody().
    __ LoadRoot(x19, Heap::kUndefinedValueRootIndex);
    __ Mov(x20, x19);
    __ Mov(x21, x19);
    __ Mov(x22, x19);
    __ Mov(x23, x19);
    __ Mov(x24, x19);
    __ Mov(x25, x19);
    // Don't initialize the reserved registers.
    // x26 : root register (root).
    // x27 : context pointer (cp).
    // x28 : JS stack pointer (jssp).
    // x29 : frame pointer (fp).

    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS internal frame and remove the parameters (except function),
    // and return.
  }

  // Result is in x0. Return.
  __ Ret();
}
836
837
// Entry trampoline for ordinary (non-construct) JS calls; delegates to
// Generate_JSEntryTrampolineHelper with is_construct == false.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
841
842
// Entry trampoline for construct ([[Construct]]) calls; delegates to
// Generate_JSEntryTrampolineHelper with is_construct == true.
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
846
847
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
// - x1: the JS function object being called.
// - x3: the new target
// - cp: our context.
// - fp: our caller's frame pointer.
// - jssp: stack pointer.
// - lr: return address.
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  // Build the fixed part of the frame: return address, caller fp, context and
  // the function being called.
  __ Push(lr, fp, cp, x1);
  __ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp);

  // Get the bytecode array from the function object and load the pointer to the
  // first entry into kInterpreterBytecodeRegister.
  __ Ldr(x0, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  Register debug_info = kInterpreterBytecodeArrayRegister;
  Label load_debug_bytecode_array, bytecode_array_loaded;
  DCHECK(!debug_info.is(x0));
  // If the function has a DebugInfo object, dispatch on the debugger's copy of
  // the bytecode array instead (see load_debug_bytecode_array below).
  __ Ldr(debug_info, FieldMemOperand(x0, SharedFunctionInfo::kDebugInfoOffset));
  __ Cmp(debug_info, Operand(DebugInfo::uninitialized()));
  __ B(ne, &load_debug_bytecode_array);
  __ Ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset));
  __ Bind(&bytecode_array_loaded);

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
                    kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, x0, x0,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Push new.target, bytecode array and zero for bytecode array offset.
  __ Mov(x0, Operand(0));
  __ Push(x3, kInterpreterBytecodeArrayRegister, x0);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ Ldr(w11, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    DCHECK(jssp.Is(__ StackPointer()));
    __ Sub(x10, jssp, Operand(x11));
    __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
    __ B(hs, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ Bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // Note: there should always be at least one stack slot for the return
    // register in the register file.
    Label loop_header;
    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
    // TODO(rmcilroy): Ensure we always have an even number of registers to
    // allow stack to be 16 bit aligned (and remove need for jssp).
    // x11 holds the frame size in bytes; convert to a register-slot count.
    __ Lsr(x11, x11, kPointerSizeLog2);
    __ PushMultipleTimes(x10, x11);
    __ Bind(&loop_header);
  }

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Code aging of the BytecodeArray object.

  // Load accumulator, register file, bytecode offset, dispatch table into
  // registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ Add(kInterpreterRegisterFileRegister, fp,
         Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ Mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ Mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  // Load the first bytecode, scale it to a table index, and fetch the handler.
  __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
  __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
  // TODO(rmcilroy): Make dispatch table point to code entrys to avoid untagging
  // and header removal.
  __ Add(ip0, ip0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(ip0);

  // Even though the first bytecode handler was called, we will never return.
  __ Abort(kUnexpectedReturnFromBytecodeHandler);

  // Load debug copy of the bytecode array.
  __ Bind(&load_debug_bytecode_array);
  __ Ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ B(&bytecode_array_loaded);
}
959
960
// Returns from an interpreted function: tears down the interpreter frame
// (including the register file), drops the receiver and arguments, and
// returns with the result in x0.
void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in accumulator, which is already in x0.

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments and return.
  // w1 holds the parameter area size read from the BytecodeArray.
  __ Ldr(w1, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                             BytecodeArray::kParameterSizeOffset));
  __ Drop(x1, 1);
  __ Ret();
}
979
980
// (Re-)enters the bytecode dispatch loop for the current interpreter frame:
// reloads the register file pointer, dispatch table, context, bytecode array
// and bytecode offset from the frame, then jumps to the handler for the
// bytecode at the current offset. Used when resuming interpretation, e.g.
// after a deoptimization.
static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) {
  // Initialize register file register and dispatch table register.
  __ Add(kInterpreterRegisterFileRegister, fp,
         Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ Mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the context from the frame.
  __ Ldr(kContextRegister,
         MemOperand(kInterpreterRegisterFileRegister,
                    InterpreterFrameConstants::kContextFromRegisterPointer));

  // Get the bytecode array pointer from the frame.
  __ Ldr(
      kInterpreterBytecodeArrayRegister,
      MemOperand(kInterpreterRegisterFileRegister,
                 InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
                    kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, x1, x1,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  // The offset is stored as a Smi and must be untagged before use.
  __ Ldr(kInterpreterBytecodeOffsetRegister,
         MemOperand(
             kInterpreterRegisterFileRegister,
             InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
  __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
  __ Add(ip0, ip0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(ip0);
}
1024
1025
// Notifies the runtime of a deoptimization with the given bailout type, then
// restores the accumulator and resumes execution in the interpreter via
// Generate_EnterBytecodeDispatch.
static void Generate_InterpreterNotifyDeoptimizedHelper(
    MacroAssembler* masm, Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Mov(x1, Operand(Smi::FromInt(static_cast<int>(type))));
    __ Push(x1);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // Drop state (we don't use these for interpreter deopts) and pop the
  // accumulator value into the accumulator register.
  __ Drop(1);
  __ Pop(kInterpreterAccumulatorRegister);

  // Enter the bytecode dispatch.
  Generate_EnterBytecodeDispatch(masm);
}
1047
1048
// Interpreter deopt notification for an eager bailout.
void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
1052
1053
// Interpreter deopt notification for a soft bailout.
void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
1057
1058
// Interpreter deopt notification for a lazy bailout.
void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
1062
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the address of the interpreter entry trampoline as a return address.
  // This simulates the initial call to bytecode handlers in interpreter entry
  // trampoline. The return will never actually be taken, but our stack walker
  // uses this address to determine whether a frame is interpreted.
  __ LoadObject(lr, masm->isolate()->builtins()->InterpreterEntryTrampoline());

  Generate_EnterBytecodeDispatch(masm);
}
1072
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001073
// Compiles the function lazily via the runtime, then tail-calls the returned
// code object.
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
1077
1078
// Triggers non-concurrent optimized compilation via the runtime, then
// tail-calls the returned code object.
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}
1083
1084
// Triggers concurrent optimized compilation via the runtime, then tail-calls
// the returned code object.
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
1088
1089
// Marks the calling code object as young again via a C call, then returns to
// its (re-executed) prologue at the address in x0.
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code fast, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   x3 - new target
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, x3, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    // Two C arguments: the resume address (x0) and the isolate (x1).
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
    __ Pop(lr, fp, x3, x1, x0);
  }

  // The calling function has been made young again, so return to execute the
  // real frame set-up code.
  __ Br(x0);
}
1118
// Defines the Generate_Make<Age>CodeYoungAgain{Even,Odd}Marking builtins for
// every code age in CODE_AGE_LIST; all of them delegate to
// GenerateMakeCodeYoungAgainCommon above. (No comments inside the macro body:
// a '//' before a line-continuation backslash would swallow it.)
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1130
1131
// Marks the calling code object as executed once via a C call, re-emits the
// young-code prologue, and resumes execution past the code-age stub.
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   x3 - new target
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, x3, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    // Two C arguments: the resume address (x0) and the isolate (x1).
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(
            masm->isolate()), 2);
    __ Pop(lr, fp, x3, x1, x0);

    // Perform prologue operations usually performed by the young code stub.
    __ EmitFrameSetupForCodeAgePatching(masm);
  }

  // Jump to point after the code-age stub.
  __ Add(x0, x0, kNoCodeAgeSequenceLength);
  __ Br(x0);
}
1163
1164
// "Executed twice" uses the same re-juvenation path as making code young.
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
1168
1169
// "To be executed once" shares the MarkCodeAsExecutedOnce implementation.
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}
1173
1174
// Notifies the runtime of a stub failure while preserving the safepoint
// registers, drops the state slot pushed by the deoptimizer, and continues at
// the miss handler address that the deoptimizer placed in lr.
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    // TODO(jbramley): Is it correct (and appropriate) to use safepoint
    // registers here? According to the comment above, we should only need to
    // preserve the registers with parameters.
    __ PushXRegList(kSafepointSavedRegisters);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ PopXRegList(kSafepointSavedRegisters);
  }

  // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
  __ Drop(1);

  // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this
  // into lr before it jumps here.
  __ Br(lr);
}
1199
1200
// Stub-failure notification without saving FP registers.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
1204
1205
// Stub-failure notification that also saves FP registers.
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
1209
1210
// Notifies the runtime of a (full-codegen) deoptimization with the given
// bailout type, then inspects the full-codegen state pushed by the
// deoptimizer to decide whether a TOS value must be restored into x0 before
// returning.
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
    __ Push(x0);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it.
  Register state = x6;
  __ Peek(state, 0);
  __ SmiUntag(state);

  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ CompareAndBranch(
      state, FullCodeGenerator::NO_REGISTERS, ne, &with_tos_register);
  __ Drop(1);  // Remove state.
  __ Ret();

  __ Bind(&with_tos_register);
  // Reload TOS register.
  __ Peek(x0, kPointerSize);
  __ CompareAndBranch(state, FullCodeGenerator::TOS_REG, ne, &unknown_state);
  __ Drop(2);  // Remove state and TOS.
  __ Ret();

  // Any other state value is a bug in the deoptimizer/codegen contract.
  __ Bind(&unknown_state);
  __ Abort(kInvalidFullCodegenState);
}
1243
1244
// Full-codegen deopt notification for an eager bailout.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
1248
1249
// Full-codegen deopt notification for a lazy bailout.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
1253
1254
// Full-codegen deopt notification for a soft bailout.
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
1258
1259
// Checks that |receiver| is compatible with the signature of the given
// FunctionTemplateInfo by walking the receiver's prototype chain (following
// hidden prototypes) and, for each constructor found, the chain of parent
// function templates. Falls through on success (|receiver| may have been
// advanced to a prototype); branches to |receiver_check_failed| otherwise.
// Clobbers x16/x17 and the three scratch registers.
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Register scratch0, Register scratch1,
                                    Register scratch2,
                                    Label* receiver_check_failed) {
  Register signature = scratch0;
  Register map = scratch1;
  Register constructor = scratch2;

  // If there is no signature, return the holder.
  __ Ldr(signature, FieldMemOperand(function_template_info,
                                    FunctionTemplateInfo::kSignatureOffset));
  __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
  Label receiver_check_passed;
  __ B(eq, &receiver_check_passed);

  // Walk the prototype chain.
  __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ Bind(&prototype_loop_start);

  // Get the constructor, if any
  __ GetMapConstructor(constructor, map, x16, x16);
  __ cmp(x16, Operand(JS_FUNCTION_TYPE));
  Label next_prototype;
  __ B(ne, &next_prototype);
  // Reuse the constructor register to hold its function template chain.
  Register type = constructor;
  __ Ldr(type,
         FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ Bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ Cmp(signature, type);
  __ B(eq, &receiver_check_passed);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ CompareObjectType(type, x16, x17, FUNCTION_TEMPLATE_INFO_TYPE);
  __ B(ne, &next_prototype);

  // Otherwise load the parent function template and iterate.
  __ Ldr(type,
         FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ B(&function_template_loop);

  // Load the next prototype.
  // Only hidden prototypes are transparently searched; a non-hidden
  // prototype ends the walk with a failure.
  __ Bind(&next_prototype);
  __ Ldr(x16, FieldMemOperand(map, Map::kBitField3Offset));
  __ Tst(x16, Operand(Map::HasHiddenPrototype::kMask));
  __ B(eq, receiver_check_failed);
  __ Ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ B(&prototype_loop_start);

  __ Bind(&receiver_check_passed);
}
1322
1323
// Fast path for API calls: verifies the receiver against the callee's
// FunctionTemplateInfo signature and jumps straight to the fast handler code,
// or throws an illegal-invocation error on mismatch.
void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments excluding receiver
  //  -- x1                 : callee
  //  -- lr                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
  CompatibleReceiverCheck(masm, x2, x3, x4, x5, x6, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ Ldr(x4, FieldMemOperand(x3, FunctionTemplateInfo::kCallCodeOffset));
  __ Ldr(x4, FieldMemOperand(x4, CallHandlerInfo::kFastHandlerOffset));
  __ Add(x4, x4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(x4);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ Bind(&receiver_check_failed);
  // Drop the arguments (including the receiver)
  __ add(x0, x0, Operand(1));
  __ Drop(x0);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}
1358
1359
// Requests on-stack replacement (OSR) compilation for the function in the
// current JavaScript frame and, if optimized code is available, "returns"
// into its OSR entry point; otherwise returns to the unoptimized code.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(x0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CompareAndBranch(x0, Smi::FromInt(0), ne, &skip);
  __ Ret();

  __ Bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ Ldrsw(w1, UntagSmiFieldMemOperand(x1, FixedArray::OffsetOfElementAt(
                                               DeoptimizationInputData::kOsrPcOffsetIndex)));

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ Add(x0, x0, x1);
  __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}
1394
1395
// static
// Implements the Date.prototype getters: loads the requested date field from
// the JSDate object, using the cached value when the date-cache stamp matches
// and falling back to the C++ date-field function otherwise. Throws a
// TypeError when the receiver is not a JSDate.
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- lr      : return address
  //  -- jssp[0] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_DatePrototype_GetField");

  // 1. Pop receiver into x0 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(x0);
    __ JumpIfSmi(x0, &receiver_not_date);
    __ JumpIfNotObjectType(x0, x1, x2, JS_DATE_TYPE, &receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    __ Ldr(x0, FieldMemOperand(x0, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      // Cached fields are valid only while the global date-cache stamp matches
      // the one stored on the JSDate instance.
      Label stamp_mismatch;
      __ Mov(x1, ExternalReference::date_cache_stamp(masm->isolate()));
      __ Ldr(x1, MemOperand(x1));
      __ Ldr(x2, FieldMemOperand(x0, JSDate::kCacheStampOffset));
      __ Cmp(x1, x2);
      __ B(ne, &stamp_mismatch);
      __ Ldr(x0, FieldMemOperand(
                     x0, JSDate::kValueOffset + field_index * kPointerSize));
      __ Ret();
      __ Bind(&stamp_mismatch);
    }
    // Slow path: call the C++ date-field function with the date object (x0)
    // and the field index (x1) as arguments.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Mov(x1, Smi::FromInt(field_index));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ Bind(&receiver_not_date);
  __ TailCallRuntime(Runtime::kThrowNotDateError);
}
1440
// static
// Implements Function.prototype[Symbol.hasInstance] by delegating the
// instanceof check to InstanceOfStub, then dropping the argument and receiver.
void Builtins::Generate_FunctionHasInstance(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0      : argc
  //  -- jssp[0] : first argument (left-hand side)
  //  -- jssp[8] : receiver (right-hand side)
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_FunctionHasInstance");

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // The fp-relative offsets account for the internal frame pushed above.
    __ Ldr(InstanceOfDescriptor::LeftRegister(),
           MemOperand(fp, 2 * kPointerSize));  // Load left-hand side.
    __ Ldr(InstanceOfDescriptor::RightRegister(),
           MemOperand(fp, 3 * kPointerSize));  // Load right-hand side.
    InstanceOfStub stub(masm->isolate(), true);
    __ CallStub(&stub);
  }

  // Pop the argument and the receiver.
  __ Drop(2);
  __ Ret();
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001464
// static
// Implements Function.prototype.apply: normalizes the stack to exactly
// (receiver, thisArg), then tail-calls either the Apply builtin with the
// argArray or the Call builtin with zero arguments.
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0       : argc
  //  -- jssp[0]  : argArray (if argc == 2)
  //  -- jssp[8]  : thisArg  (if argc >= 1)
  //  -- jssp[16] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_FunctionPrototypeApply");

  Register argc = x0;
  Register arg_array = x0;  // Note: aliases argc; argc is dead after the Pop.
  Register receiver = x1;
  Register this_arg = x2;
  Register undefined_value = x3;
  Register null_value = x4;

  __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);

  // 1. Load receiver into x1, argArray into x0 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    // Claim (2 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
    __ Claim(2);
    __ Drop(argc);

    // ----------- S t a t e -------------
    //  -- x0       : argc
    //  -- jssp[0]  : argArray (dummy value if argc <= 1)
    //  -- jssp[8]  : thisArg  (dummy value if argc == 0)
    //  -- jssp[16] : receiver
    // -----------------------------------
    __ Cmp(argc, 1);
    __ Pop(arg_array, this_arg);               // Overwrites argc.
    __ CmovX(this_arg, undefined_value, lo);   // undefined if argc == 0.
    __ CmovX(arg_array, undefined_value, ls);  // undefined if argc <= 1.

    __ Peek(receiver, 0);
    __ Poke(this_arg, 0);
  }

  // ----------- S t a t e -------------
  //  -- x0      : argArray
  //  -- x1      : receiver
  //  -- x3      : undefined root value
  //  -- jssp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(receiver, &receiver_not_callable);
  __ Ldr(x10, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Ldrb(w10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable,
                             &receiver_not_callable);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ Cmp(arg_array, null_value);
  __ Ccmp(arg_array, undefined_value, ZFlag, ne);
  __ B(eq, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target in x3).
  DCHECK(undefined_value.Is(x3));
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ Bind(&no_arguments);
  {
    __ Mov(x0, 0);
    DCHECK(receiver.Is(x1));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ Bind(&receiver_not_callable);
  {
    __ Poke(receiver, 0);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1551
1552
// static
// Implements Function.prototype.call: ensures at least one argument exists,
// takes the callable from the receiver slot, shifts the remaining arguments
// down one slot so the first argument becomes the new receiver, and
// tail-calls the Call builtin.
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  Register argc = x0;
  Register function = x1;
  Register scratch1 = x10;
  Register scratch2 = x11;

  ASM_LOCATION("Builtins::Generate_FunctionPrototypeCall");

  // 1. Make sure we have at least one argument.
  // If there are none, push undefined so the receiver slot below is valid.
  {
    Label done;
    __ Cbnz(argc, &done);
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Push(scratch1);
    __ Mov(argc, 1);
    __ Bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is jssp.
    __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2));
    __ Sub(scratch1, scratch2, kPointerSize);

    __ Bind(&loop);
    __ Ldr(x12, MemOperand(scratch1, -kPointerSize, PostIndex));
    __ Str(x12, MemOperand(scratch2, -kPointerSize, PostIndex));
    __ Cmp(scratch1, jssp);
    __ B(ge, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Sub(argc, argc, 1);
    __ Drop(1);
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
1598
1599
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001600void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1601 // ----------- S t a t e -------------
1602 // -- x0 : argc
1603 // -- jssp[0] : argumentsList (if argc == 3)
1604 // -- jssp[8] : thisArgument (if argc >= 2)
1605 // -- jssp[16] : target (if argc >= 1)
1606 // -- jssp[24] : receiver
1607 // -----------------------------------
1608 ASM_LOCATION("Builtins::Generate_ReflectApply");
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001609
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001610 Register argc = x0;
1611 Register arguments_list = x0;
1612 Register target = x1;
1613 Register this_argument = x2;
1614 Register undefined_value = x3;
1615
1616 __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
1617
1618 // 1. Load target into x1 (if present), argumentsList into x0 (if present),
1619 // remove all arguments from the stack (including the receiver), and push
1620 // thisArgument (if present) instead.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001621 {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001622 // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
1623 // consistent state for a simple pop operation.
1624 __ Claim(3);
1625 __ Drop(argc);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001626
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001627 // ----------- S t a t e -------------
1628 // -- x0 : argc
1629 // -- jssp[0] : argumentsList (dummy value if argc <= 2)
1630 // -- jssp[8] : thisArgument (dummy value if argc <= 1)
1631 // -- jssp[16] : target (dummy value if argc == 0)
1632 // -- jssp[24] : receiver
1633 // -----------------------------------
1634 __ Adds(x10, argc, 0); // Preserve argc, and set the Z flag if it is zero.
1635 __ Pop(arguments_list, this_argument, target); // Overwrites argc.
1636 __ CmovX(target, undefined_value, eq); // undefined if argc == 0.
1637 __ Cmp(x10, 2);
1638 __ CmovX(this_argument, undefined_value, lo); // undefined if argc <= 1.
1639 __ CmovX(arguments_list, undefined_value, ls); // undefined if argc <= 2.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001640
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001641 __ Poke(this_argument, 0); // Overwrite receiver.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001642 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001643
1644 // ----------- S t a t e -------------
1645 // -- x0 : argumentsList
1646 // -- x1 : target
1647 // -- jssp[0] : thisArgument
1648 // -----------------------------------
1649
1650 // 2. Make sure the target is actually callable.
1651 Label target_not_callable;
1652 __ JumpIfSmi(target, &target_not_callable);
1653 __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
1654 __ Ldr(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
1655 __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable, &target_not_callable);
1656
1657 // 3a. Apply the target to the given argumentsList (passing undefined for
1658 // new.target in x3).
1659 DCHECK(undefined_value.Is(x3));
1660 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1661
1662 // 3b. The target is not callable, throw an appropriate TypeError.
1663 __ Bind(&target_not_callable);
1664 {
1665 __ Poke(target, 0);
1666 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1667 }
1668}
1669
1670
// Implements Reflect.construct(target, argumentsList[, newTarget]):
// normalizes the arguments (new.target defaults to target), verifies both
// target and new.target are constructors, and tail-calls the Apply builtin.
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- x0 : argc
  // -- jssp[0] : new.target (optional)
  // -- jssp[8] : argumentsList
  // -- jssp[16] : target
  // -- jssp[24] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ReflectConstruct");

  Register argc = x0;
  Register arguments_list = x0;  // Aliases argc; valid only after the Pop.
  Register target = x1;
  Register new_target = x3;
  Register undefined_value = x4;

  __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);

  // 1. Load target into x1 (if present), argumentsList into x0 (if present),
  // new.target into x3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
    __ Claim(3);
    __ Drop(argc);

    // ----------- S t a t e -------------
    // -- x0 : argc
    // -- jssp[0] : new.target (dummy value if argc <= 2)
    // -- jssp[8] : argumentsList (dummy value if argc <= 1)
    // -- jssp[16] : target (dummy value if argc == 0)
    // -- jssp[24] : receiver
    // -----------------------------------
    __ Adds(x10, argc, 0);  // Preserve argc, and set the Z flag if it is zero.
    __ Pop(new_target, arguments_list, target);  // Overwrites argc.
    // Patch up the dummies popped above based on the real argument count.
    __ CmovX(target, undefined_value, eq);  // undefined if argc == 0.
    __ Cmp(x10, 2);
    __ CmovX(arguments_list, undefined_value, lo);  // undefined if argc <= 1.
    __ CmovX(new_target, target, ls);  // target if argc <= 2.

    // Reflect.construct ignores the receiver; it is always undefined.
    __ Poke(undefined_value, 0);  // Overwrite receiver.
  }

  // ----------- S t a t e -------------
  // -- x0 : argumentsList
  // -- x1 : target
  // -- x3 : new.target
  // -- jssp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(target, &target_not_constructor);
  __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
  __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor,
                             &target_not_constructor);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(new_target, &new_target_not_constructor);
  __ Ldr(x10, FieldMemOperand(new_target, HeapObject::kMapOffset));
  __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor,
                             &new_target_not_constructor);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ Bind(&target_not_constructor);
  {
    __ Poke(target, 0);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ Bind(&new_target_not_constructor);
  {
    __ Poke(new_target, 0);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
1756
1757
1758static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
1759 Label* stack_overflow) {
1760 // ----------- S t a t e -------------
1761 // -- x0 : actual number of arguments
1762 // -- x1 : function (passed through to callee)
1763 // -- x2 : expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001764 // -- x3 : new target (passed through to callee)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001765 // -----------------------------------
1766 // Check the stack for overflow.
1767 // We are not trying to catch interruptions (e.g. debug break and
1768 // preemption) here, so the "real stack limit" is checked.
1769 Label enough_stack_space;
1770 __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
1771 // Make x10 the space we have left. The stack might already be overflowed
1772 // here which will cause x10 to become negative.
1773 __ Sub(x10, jssp, x10);
1774 // Check if the arguments will overflow the stack.
1775 __ Cmp(x10, Operand(x2, LSL, kPointerSizeLog2));
1776 __ B(le, stack_overflow);
1777}
1778
1779
// Builds an arguments adaptor frame: saves lr/fp, pushes the frame-type
// marker, the function (x1) and the smi-tagged argument count (from x0),
// then points fp at the standard fixed-frame position.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(x10, x0);  // x0 holds the untagged actual argument count.
  __ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ Push(lr, fp);
  __ Push(x11, x1, x10);
  __ Add(fp, jssp,
         StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
}
1788
1789
// Tears down the arguments adaptor frame built by EnterArgumentsAdaptorFrame
// and drops the pushed parameters plus the receiver; x0 (the call result)
// is left untouched.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- x0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then drop the parameters and the receiver.
  __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                               kPointerSize)));
  __ Mov(jssp, fp);
  __ Pop(fp, lr);
  // x10 is a smi; DropBySMI scales it by the slot size, then Drop(1) removes
  // the receiver.
  __ DropBySMI(x10, kXRegSize);
  __ Drop(1);
}
1803
1804
// static
// Materializes argumentsList into a FixedArray, checks stack space, pushes
// the elements as stack arguments, and dispatches to either Call (when
// new.target is undefined) or Construct.
// NOTE: args (x0) aliases arguments_list and len (x2) aliases
// arguments_list_map below — each alias is only live after its source is
// dead.
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- x0 : argumentsList
  // -- x1 : target
  // -- x3 : new.target (checked to be constructor or undefined)
  // -- jssp[0] : thisArgument
  // -----------------------------------

  Register arguments_list = x0;
  Register target = x1;
  Register new_target = x3;

  Register args = x0;
  Register len = x2;

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(arguments_list, &create_runtime);

    // Load native context.
    Register native_context = x4;
    __ Ldr(native_context, NativeContextMemOperand());

    // Load the map of argumentsList.
    Register arguments_list_map = x2;
    __ Ldr(arguments_list_map,
           FieldMemOperand(arguments_list, HeapObject::kMapOffset));

    // Check if argumentsList is an (unmodified) arguments object.
    // Compares against both the sloppy and strict arguments maps in one
    // Cmp/Ccmp pair.
    __ Ldr(x10, ContextMemOperand(native_context,
                                  Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ Ldr(x11, ContextMemOperand(native_context,
                                  Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ Cmp(arguments_list_map, x10);
    __ Ccmp(arguments_list_map, x11, ZFlag, ne);
    __ B(eq, &create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(arguments_list_map, native_context, JS_ARRAY_TYPE);
    __ B(eq, &create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ Bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(target, new_target, arguments_list);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      // The runtime result (the FixedArray) comes back in x0 ==
      // arguments_list.
      __ Pop(new_target, target);
      __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list,
                                            FixedArray::kLengthOffset));
    }
    __ B(&done_create);

    // Try to create the list from an arguments object.
    __ Bind(&create_arguments);
    __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list,
                                          JSArgumentsObject::kLengthOffset));
    __ Ldr(x10, FieldMemOperand(arguments_list, JSObject::kElementsOffset));
    __ Ldrsw(x11, UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
    // If the arguments length disagrees with the backing store length, the
    // object was modified — fall back to the runtime.
    __ CompareAndBranch(len, x11, ne, &create_runtime);
    __ Mov(args, x10);  // Use the elements FixedArray directly.
    __ B(&done_create);

    // Try to create the list from a JSArray object.
    __ Bind(&create_array);
    __ Ldr(x10, FieldMemOperand(arguments_list_map, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(x10);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    // Branch for anything that's not FAST_{SMI_}ELEMENTS.
    __ TestAndBranchIfAnySet(x10, ~FAST_ELEMENTS, &create_runtime);
    __ Ldrsw(len,
             UntagSmiFieldMemOperand(arguments_list, JSArray::kLengthOffset));
    __ Ldr(args, FieldMemOperand(arguments_list, JSArray::kElementsOffset));

    __ Bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
    // Make x10 the space we have left. The stack might already be overflowed
    // here which will cause x10 to become negative.
    __ Sub(x10, masm->StackPointer(), x10);
    // Check if the arguments will overflow the stack.
    __ Cmp(x10, Operand(len, LSL, kPointerSizeLog2));
    __ B(gt, &done);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ Bind(&done);
  }

  // ----------- S t a t e -------------
  // -- x0 : args (a FixedArray built from argumentsList)
  // -- x1 : target
  // -- x2 : len (number of elements to push from args)
  // -- x3 : new.target (checked to be constructor or undefined)
  // -- jssp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label done, loop;
    Register src = x4;

    __ Add(src, args, FixedArray::kHeaderSize - kHeapObjectTag);
    __ Mov(x0, len);  // The 'len' argument for Call() or Construct().
    __ Cbz(len, &done);
    // Reserve len slots, then fill them top-down: args[0] ends up deepest so
    // args[len-1] is on top of the stack, matching the calling convention.
    __ Claim(len);
    __ Bind(&loop);
    __ Sub(len, len, 1);
    __ Ldr(x10, MemOperand(src, kPointerSize, PostIndex));
    __ Poke(x10, Operand(len, LSL, kPointerSizeLog2));
    __ Cbnz(len, &loop);
    __ Bind(&done);
  }

  // ----------- S t a t e -------------
  // -- x0 : argument count (len)
  // -- x1 : target
  // -- x3 : new.target (checked to be constructor or undefined)
  // -- jssp[0] : args[len-1]
  // -- jssp[8] : args[len-2]
  // ... : ...
  // -- jssp[8*(len-2)] : args[1]
  // -- jssp[8*(len-1)] : args[0]
  // -----------------------------------

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(new_target, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
1944
namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// | ...
// | g()'s arg M
// | ...
// | g()'s arg 1
// | g()'s receiver arg
// | g()'s caller pc
// ------- g()'s frame: -------
// | g()'s caller fp <- fp
// | g()'s context
// | function pointer: g
// | -------------------------
// | ...
// | ...
// | f()'s arg N
// | ...
// | f()'s arg 1
// | f()'s receiver arg <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
// args_reg holds the callee argument count; scratch1-3 are clobbered.
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  // The flag lives at a byte-sized isolate address read at runtime.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ Mov(scratch1, Operand(is_tail_call_elimination_enabled));
  __ Ldrb(scratch1, MemOperand(scratch1));
  __ Cmp(scratch1, Operand(0));
  __ B(eq, &done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ Ldr(scratch3,
           MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ Cmp(scratch3, Operand(Smi::FromInt(StackFrame::STUB)));
    __ B(ne, &no_interpreter_frame);
    __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;  // Reuses scratch1 from here on.
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ Ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ Ldr(scratch3,
         MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Cmp(scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ B(ne, &no_arguments_adaptor);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mov(fp, scratch2);
  __ Ldr(caller_args_count_reg,
         MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ B(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count
  __ Ldr(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(scratch1,
         FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldrsw(caller_args_count_reg,
           FieldMemOperand(scratch1,
                           SharedFunctionInfo::kFormalParameterCountOffset));
  __ bind(&formal_parameter_count_loaded);

  // Delegate the actual frame surgery to the macro-assembler helper.
  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002033
// static
// Calls the JSFunction in x1: rejects class constructors, converts the
// receiver when the callee is a non-native sloppy-mode function (null or
// undefined receivers become the global proxy, primitives go through
// ToObject), optionally prepares a tail call, and invokes the function code
// with the formal parameter count loaded from its SharedFunctionInfo.
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  ASM_LOCATION("Builtins::Generate_CallFunction");
  // ----------- S t a t e -------------
  // -- x0 : the number of arguments (not including the receiver)
  // -- x1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(x1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that function is not a "classConstructor".
  Label class_constructor;
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
  __ TestAndBranchIfAnySet(
      w3, (1 << SharedFunctionInfo::kIsDefaultConstructor) |
              (1 << SharedFunctionInfo::kIsSubclassConstructor) |
              (1 << SharedFunctionInfo::kIsBaseConstructor),
      &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ TestAndBranchIfAnySet(w3,
                           (1 << SharedFunctionInfo::kNative) |
                               (1 << SharedFunctionInfo::kStrictModeFunction),
                           &done_convert);
  {
    // ----------- S t a t e -------------
    // -- x0 : the number of arguments (not including the receiver)
    // -- x1 : the function to call (checked to be a JSFunction)
    // -- x2 : the shared function info.
    // -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(x3);
    } else {
      Label convert_to_object, convert_receiver;
      // Load the receiver from the stack into x3.
      __ Peek(x3, Operand(x0, LSL, kXRegSizeLog2));
      __ JumpIfSmi(x3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(x3, x4, x4, FIRST_JS_RECEIVER_TYPE);
      // JSReceivers need no conversion.
      __ B(hs, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(x3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(x3, Heap::kNullValueRootIndex, &convert_to_object);
        __ Bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(x3);
        }
        __ B(&convert_receiver);
      }
      __ Bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        // x0 (argc) is smi-tagged and saved across the stub call.
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(x0);
        __ Push(x0, x1);
        __ Mov(x0, x3);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ Mov(x3, x0);
        __ Pop(x1, x0);
        __ SmiUntag(x0);
      }
      // Reload the shared function info clobbered above.
      __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
      __ Bind(&convert_receiver);
    }
    // Write the converted receiver back to its stack slot.
    __ Poke(x3, Operand(x0, LSL, kXRegSizeLog2));
  }
  __ Bind(&done_convert);

  // ----------- S t a t e -------------
  // -- x0 : the number of arguments (not including the receiver)
  // -- x1 : the function to call (checked to be a JSFunction)
  // -- x2 : the shared function info.
  // -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, x0, x3, x4, x5);
  }

  __ Ldrsw(
      x2, FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(x0);
  ParameterCount expected(x2);
  __ InvokeFunctionCode(x1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(x1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
2144
2145
namespace {

// Pushes the [[BoundArguments]] of the JSBoundFunction in x1 between the
// receiver and the existing stack arguments, incrementing x0 by the number
// of bound arguments. Used by both the call and construct paths below.
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- x0 : the number of arguments (not including the receiver)
  // -- x1 : target (checked to be a JSBoundFunction)
  // -- x3 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into x2 and length of that into x4.
  Label no_bound_arguments;
  __ Ldr(x2, FieldMemOperand(x1, JSBoundFunction::kBoundArgumentsOffset));
  __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
  __ Cmp(x4, 0);
  __ B(eq, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    // -- x0 : the number of arguments (not including the receiver)
    // -- x1 : target (checked to be a JSBoundFunction)
    // -- x2 : the [[BoundArguments]] (implemented as FixedArray)
    // -- x3 : new.target (only in case of [[Construct]])
    // -- x4 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ Claim(x4);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(jssp, Heap::kRealStackLimitRootIndex);
      __ B(gt, &done);  // Signed comparison.
      // Restore the stack pointer.
      __ Drop(x4);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ Bind(&done);
    }

    // Relocate arguments down the stack.
    // Copies the x0 arguments plus the receiver (hence the 'gt' exit after
    // comparing the x5 index against x0) from their old slots (offset by x4,
    // the claim size) into the newly claimed space.
    {
      Label loop, done_loop;
      __ Mov(x5, 0);
      __ Bind(&loop);
      __ Cmp(x5, x0);
      __ B(gt, &done_loop);
      __ Peek(x10, Operand(x4, LSL, kPointerSizeLog2));
      __ Poke(x10, Operand(x5, LSL, kPointerSizeLog2));
      __ Add(x4, x4, 1);
      __ Add(x5, x5, 1);
      __ B(&loop);
      __ Bind(&done_loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    // x4 is reloaded since the loop above clobbered it; each element is
    // poked just above the relocated arguments while x0 grows to the new
    // total argument count.
    {
      Label loop;
      __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
      __ Add(x2, x2, FixedArray::kHeaderSize - kHeapObjectTag);
      __ Bind(&loop);
      __ Sub(x4, x4, 1);
      __ Ldr(x10, MemOperand(x2, x4, LSL, kPointerSizeLog2));
      __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));
      __ Add(x0, x0, 1);
      __ Cmp(x4, 0);
      __ B(gt, &loop);
    }
  }
  __ Bind(&no_bound_arguments);
}

}  // namespace
2222
2223
// static
// Calls the JSBoundFunction in x1: installs [[BoundThis]] as the receiver,
// pushes the [[BoundArguments]], then jumps to the Call builtin with the
// [[BoundTargetFunction]] as the new target.
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  // -- x0 : the number of arguments (not including the receiver)
  // -- x1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(x1);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, x0, x3, x4, x5);
  }

  // Patch the receiver to [[BoundThis]].
  __ Ldr(x10, FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset));
  __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  // The builtin's code object is loaded indirectly through the isolate's
  // builtins table so it stays valid across GC code moves.
  __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Mov(x10,
         ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()));
  __ Ldr(x11, MemOperand(x10));
  __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x12);
}
2252
2253
// static
// Generic [[Call]] dispatch: routes JSFunctions to CallFunction, bound
// functions to CallBoundFunction, proxies to the runtime, other callables
// through the call-as-function delegate, and throws for non-callables.
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  // -- x0 : the number of arguments (not including the receiver)
  // -- x1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(x1, &non_callable);
  __ Bind(&non_smi);
  // x4 receives the map, x5 the instance type; both are reused below.
  __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ Ldrb(x4, FieldMemOperand(x4, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x4, 1 << Map::kIsCallable, &non_callable);

  __ Cmp(x5, JS_PROXY_TYPE);
  __ B(ne, &non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, x0, x3, x4, x5);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(x1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ Add(x0, x0, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ Bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, x1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
2312
2313
// static
// Constructs via a JSFunction: loads the function-specific construct stub
// from the SharedFunctionInfo and tail-jumps to it.
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- x0 : the number of arguments (not including the receiver)
  // -- x1 : the constructor to call (checked to be a JSFunction)
  // -- x3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(x1);

  // Calling convention for function specific ConstructStubs require
  // x2 to contain either an AllocationSite or undefined.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
  // Skip the Code object header to reach the first instruction.
  __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x4);
}
2334
2335
// static
// Construct dispatch target for JSBoundFunctions: unwraps the bound target
// and re-enters the generic Construct builtin against it.
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSBoundFunction)
  //  -- x3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(x1);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  // (Only when x1 == x3; an explicit new.target from e.g. Reflect.construct
  // is left untouched.)
  {
    Label done;
    __ Cmp(x1, x3);
    __ B(ne, &done);
    __ Ldr(x3,
           FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ Bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  // Load the builtin's Code object through its external-reference cell,
  // compute the (untagged) entry address, and tail-branch to it.
  __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Mov(x10, ExternalReference(Builtins::kConstruct, masm->isolate()));
  __ Ldr(x11, MemOperand(x10));
  __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x12);
}
2365
2366
// static
// Construct dispatch target for JSProxies: defers entirely to the runtime
// implementation of Proxy [[Construct]].
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (checked to be a JSProxy)
  //  -- x3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(x1);
  __ Push(x3);
  // Include the pushed new_target, constructor and the receiver.
  // (x0 becomes the runtime call's argument count.)
  __ Add(x0, x0, 3);
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
2385
2386
// static
// Generic Construct builtin: classifies the target in x1 and dispatches to
// the matching specialized construct path (function, bound function, proxy,
// callable-exotic delegate) or throws if the target is not a constructor.
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (can be any Object)
  //  -- x3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(x1, &non_constructor);

  // Dispatch based on instance type.
  // CompareObjectType leaves the map in x4 and the instance type in x5;
  // both are reused by the checks below.
  __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  // The map's bit field records constructor-ness.
  __ Ldrb(x2, FieldMemOperand(x4, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x2, 1 << Map::kIsConstructor, &non_constructor);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ Cmp(x5, JS_PROXY_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    // (Poke stores at jssp + x0 * 8, i.e. the receiver slot.)
    __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, x1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
2436
2437
2438// static
Ben Murdoch097c5b22016-05-18 11:27:45 +01002439void Builtins::Generate_InterpreterPushArgsAndCallImpl(
2440 MacroAssembler* masm, TailCallMode tail_call_mode) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002441 // ----------- S t a t e -------------
2442 // -- x0 : the number of arguments (not including the receiver)
2443 // -- x2 : the address of the first argument to be pushed. Subsequent
2444 // arguments should be consecutive above this, in the same order as
2445 // they are to be pushed onto the stack.
2446 // -- x1 : the target to call (can be any Object).
2447 // -----------------------------------
2448
2449 // Find the address of the last argument.
2450 __ add(x3, x0, Operand(1)); // Add one for receiver.
2451 __ lsl(x3, x3, kPointerSizeLog2);
2452 __ sub(x4, x2, x3);
2453
2454 // Push the arguments.
2455 Label loop_header, loop_check;
2456 __ Mov(x5, jssp);
2457 __ Claim(x3, 1);
2458 __ B(&loop_check);
2459 __ Bind(&loop_header);
2460 // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
2461 __ Ldr(x3, MemOperand(x2, -kPointerSize, PostIndex));
2462 __ Str(x3, MemOperand(x5, -kPointerSize, PreIndex));
2463 __ Bind(&loop_check);
2464 __ Cmp(x2, x4);
2465 __ B(gt, &loop_header);
2466
2467 // Call the target.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002468 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2469 tail_call_mode),
2470 RelocInfo::CODE_TARGET);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002471}
2472
2473
// static
// Interpreter helper: pushes a hole (zero) receiver slot plus the argument
// block addressed downward from x2, then tail-calls the generic Construct
// builtin with x0/x1/x3 preserved.
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : argument count (not including receiver)
  //  -- x3 : new target
  //  -- x1 : constructor to call
  //  -- x2 : address of the first argument
  // -----------------------------------

  // Find the address of the last argument.
  // x5 = (argc + 1) * kPointerSize — total byte size including the
  // receiver slot that will be pushed below.
  __ add(x5, x0, Operand(1));  // Add one for receiver (to be constructed).
  __ lsl(x5, x5, kPointerSizeLog2);

  // Set stack pointer and where to stop.
  // x6 tracks the old stack top; x4 is the loop stop address.
  __ Mov(x6, jssp);
  __ Claim(x5, 1);
  __ sub(x4, x6, x5);

  // Push a slot for the receiver.
  // (xzr: the receiver will be allocated by the construct stub itself.)
  __ Str(xzr, MemOperand(x6, -kPointerSize, PreIndex));

  Label loop_header, loop_check;
  // Push the arguments.
  __ B(&loop_check);
  __ Bind(&loop_header);
  // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
  // x5 is free for reuse as a scratch register once the loop starts.
  __ Ldr(x5, MemOperand(x2, -kPointerSize, PostIndex));
  __ Str(x5, MemOperand(x6, -kPointerSize, PreIndex));
  __ Bind(&loop_check);
  __ Cmp(x6, x4);
  __ B(gt, &loop_header);

  // Call the constructor with x0, x1, and x3 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
2509
2510
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002511void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2512 ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
2513 // ----------- S t a t e -------------
2514 // -- x0 : actual number of arguments
2515 // -- x1 : function (passed through to callee)
2516 // -- x2 : expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002517 // -- x3 : new target (passed through to callee)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002518 // -----------------------------------
2519
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002520 Register argc_actual = x0; // Excluding the receiver.
2521 Register argc_expected = x2; // Excluding the receiver.
2522 Register function = x1;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002523 Register code_entry = x10;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002524
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002525 Label invoke, dont_adapt_arguments, stack_overflow;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002526
2527 Label enough, too_few;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002528 __ Cmp(argc_actual, argc_expected);
2529 __ B(lt, &too_few);
2530 __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
2531 __ B(eq, &dont_adapt_arguments);
2532
2533 { // Enough parameters: actual >= expected
2534 EnterArgumentsAdaptorFrame(masm);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002535 ArgumentAdaptorStackCheck(masm, &stack_overflow);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002536
2537 Register copy_start = x10;
2538 Register copy_end = x11;
2539 Register copy_to = x12;
2540 Register scratch1 = x13, scratch2 = x14;
2541
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002542 __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002543
2544 // Adjust for fp, lr, and the receiver.
2545 __ Add(copy_start, fp, 3 * kPointerSize);
2546 __ Add(copy_start, copy_start, Operand(argc_actual, LSL, kPointerSizeLog2));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002547 __ Sub(copy_end, copy_start, scratch2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002548 __ Sub(copy_end, copy_end, kPointerSize);
2549 __ Mov(copy_to, jssp);
2550
2551 // Claim space for the arguments, the receiver, and one extra slot.
2552 // The extra slot ensures we do not write under jssp. It will be popped
2553 // later.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002554 __ Add(scratch1, scratch2, 2 * kPointerSize);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002555 __ Claim(scratch1, 1);
2556
2557 // Copy the arguments (including the receiver) to the new stack frame.
2558 Label copy_2_by_2;
2559 __ Bind(&copy_2_by_2);
2560 __ Ldp(scratch1, scratch2,
2561 MemOperand(copy_start, - 2 * kPointerSize, PreIndex));
2562 __ Stp(scratch1, scratch2,
2563 MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
2564 __ Cmp(copy_start, copy_end);
2565 __ B(hi, &copy_2_by_2);
2566
2567 // Correct the space allocated for the extra slot.
2568 __ Drop(1);
2569
2570 __ B(&invoke);
2571 }
2572
2573 { // Too few parameters: Actual < expected
2574 __ Bind(&too_few);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002575
2576 Register copy_from = x10;
2577 Register copy_end = x11;
2578 Register copy_to = x12;
2579 Register scratch1 = x13, scratch2 = x14;
2580
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002581 EnterArgumentsAdaptorFrame(masm);
2582 ArgumentAdaptorStackCheck(masm, &stack_overflow);
2583
2584 __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002585 __ Lsl(argc_actual, argc_actual, kPointerSizeLog2);
2586
2587 // Adjust for fp, lr, and the receiver.
2588 __ Add(copy_from, fp, 3 * kPointerSize);
2589 __ Add(copy_from, copy_from, argc_actual);
2590 __ Mov(copy_to, jssp);
2591 __ Sub(copy_end, copy_to, 1 * kPointerSize); // Adjust for the receiver.
2592 __ Sub(copy_end, copy_end, argc_actual);
2593
2594 // Claim space for the arguments, the receiver, and one extra slot.
2595 // The extra slot ensures we do not write under jssp. It will be popped
2596 // later.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002597 __ Add(scratch1, scratch2, 2 * kPointerSize);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002598 __ Claim(scratch1, 1);
2599
2600 // Copy the arguments (including the receiver) to the new stack frame.
2601 Label copy_2_by_2;
2602 __ Bind(&copy_2_by_2);
2603 __ Ldp(scratch1, scratch2,
2604 MemOperand(copy_from, - 2 * kPointerSize, PreIndex));
2605 __ Stp(scratch1, scratch2,
2606 MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
2607 __ Cmp(copy_to, copy_end);
2608 __ B(hi, &copy_2_by_2);
2609
2610 __ Mov(copy_to, copy_end);
2611
2612 // Fill the remaining expected arguments with undefined.
2613 __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
2614 __ Add(copy_end, jssp, kPointerSize);
2615
2616 Label fill;
2617 __ Bind(&fill);
2618 __ Stp(scratch1, scratch1,
2619 MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
2620 __ Cmp(copy_to, copy_end);
2621 __ B(hi, &fill);
2622
2623 // Correct the space allocated for the extra slot.
2624 __ Drop(1);
2625 }
2626
2627 // Arguments have been adapted. Now call the entry point.
2628 __ Bind(&invoke);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002629 __ Mov(argc_actual, argc_expected);
2630 // x0 : expected number of arguments
2631 // x1 : function (passed through to callee)
2632 // x3 : new target (passed through to callee)
2633 __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002634 __ Call(code_entry);
2635
2636 // Store offset of return address for deoptimizer.
2637 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2638
2639 // Exit frame and return.
2640 LeaveArgumentsAdaptorFrame(masm);
2641 __ Ret();
2642
2643 // Call the entry point without adapting the arguments.
2644 __ Bind(&dont_adapt_arguments);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002645 __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002646 __ Jump(code_entry);
2647
2648 __ Bind(&stack_overflow);
2649 {
2650 FrameScope frame(masm, StackFrame::MANUAL);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002651 __ CallRuntime(Runtime::kThrowStackOverflow);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002652 __ Unreachable();
2653 }
2654}
2655
2656
2657#undef __
2658
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002659} // namespace internal
2660} // namespace v8
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002661
#endif  // V8_TARGET_ARCH_ARM64