blob: 11f66a4ef46caa9f2607a40e693c082b14810c8d [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_ARM64
6
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007#include "src/arm64/frames-arm64.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008#include "src/codegen.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009#include "src/debug/debug.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000010#include "src/deoptimizer.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011#include "src/full-codegen/full-codegen.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040012#include "src/runtime/runtime.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000013
14namespace v8 {
15namespace internal {
16
17
18#define __ ACCESS_MASM(masm)
19
20
// Load the built-in Array function from the current context into |result|.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}
26
27
// Load the built-in InternalArray function from the current context into
// |result|.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}
34
35
// Adaptor that pushes the optional extra arguments (target and/or new
// target, per |extra_args|), fixes up the argument count in x0, and
// tail-calls the C++ builtin |id| through JumpToExternalReference.
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments excluding receiver
  //  -- x1                 : target
  //  -- x3                 : new target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument (8-byte slots on arm64)
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(x1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // Insert extra arguments. The runtime builtin sees them as trailing
  // (stack-topmost) arguments.
  int num_extra_args = 0;
  switch (extra_args) {
    case BuiltinExtraArguments::kTarget:
      __ Push(x1);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kNewTarget:
      __ Push(x3);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kTargetAndNewTarget:
      __ Push(x1, x3);
      num_extra_args += 2;
      break;
    case BuiltinExtraArguments::kNone:
      break;
  }

  // JumpToExternalReference expects x0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Add(x0, x0, num_extra_args + 1);

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
81
82
83void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
84 // ----------- S t a t e -------------
85 // -- x0 : number of arguments
86 // -- lr : return address
87 // -- sp[...]: constructor arguments
88 // -----------------------------------
89 ASM_LOCATION("Builtins::Generate_InternalArrayCode");
90 Label generic_array_code;
91
92 // Get the InternalArray function.
93 GenerateLoadInternalArrayFunction(masm, x1);
94
95 if (FLAG_debug_code) {
96 // Initial map for the builtin InternalArray functions should be maps.
97 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
98 __ Tst(x10, kSmiTagMask);
99 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
100 __ CompareObjectType(x10, x11, x12, MAP_TYPE);
101 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
102 }
103
104 // Run the native code for the InternalArray function called as a normal
105 // function.
106 InternalArrayConstructorStub stub(masm->isolate());
107 __ TailCallStub(&stub);
108}
109
110
111void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
112 // ----------- S t a t e -------------
113 // -- x0 : number of arguments
114 // -- lr : return address
115 // -- sp[...]: constructor arguments
116 // -----------------------------------
117 ASM_LOCATION("Builtins::Generate_ArrayCode");
118 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
119
120 // Get the Array function.
121 GenerateLoadArrayFunction(masm, x1);
122
123 if (FLAG_debug_code) {
124 // Initial map for the builtin Array functions should be maps.
125 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
126 __ Tst(x10, kSmiTagMask);
127 __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
128 __ CompareObjectType(x10, x11, x12, MAP_TYPE);
129 __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
130 }
131
132 // Run the native code for the Array function called as a normal function.
133 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000134 __ Mov(x3, x1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000135 ArrayConstructorStub stub(masm->isolate());
136 __ TailCallStub(&stub);
137}
138
139
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000140// static
// Implements Math.max / Math.min (selected by |kind|) over the pushed
// arguments. Keeps a running accumulator as a tagged value in x1 and its
// double value in d1, iterating the arguments from last to first.
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments
  //  -- lr                 : return address
  //  -- sp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- sp[(argc + 1) * 8] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_MathMaxMin");

  // The identity element: +Infinity for min, -Infinity for max.
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in x1 and the double value in d1.
  __ LoadRoot(x1, root_index);
  __ Ldr(d1, FieldMemOperand(x1, HeapNumber::kValueOffset));

  // Remember how many slots to drop (including the receiver).
  __ Add(x4, x0, 1);

  Label done_loop, loop;
  __ Bind(&loop);
  {
    // Check if all parameters done. x0 counts down and doubles as the
    // zero-based index of the next parameter to process.
    __ Subs(x0, x0, 1);
    __ B(lt, &done_loop);

    // Load the next parameter tagged value into x2.
    __ Peek(x2, Operand(x0, LSL, kPointerSizeLog2));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumberStub if necessary.
    Label convert_smi, convert_number, done_convert;
    __ JumpIfSmi(x2, &convert_smi);
    __ JumpIfHeapNumber(x2, &convert_number);
    {
      // Parameter is not a Number, use the ToNumberStub to convert it.
      // x0 (loop counter) and x4 (drop count) are smi-tagged and saved
      // across the call, together with the tagged accumulator x1.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(x0);
      __ SmiTag(x4);
      __ Push(x0, x1, x4);
      __ Mov(x0, x2);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Mov(x2, x0);
      __ Pop(x4, x1, x0);
      {
        // Restore the double accumulator value (d1) from the tagged
        // accumulator x1, which may be a smi or a HeapNumber.
        Label done_restore;
        __ SmiUntagToDouble(d1, x1, kSpeculativeUntag);
        __ JumpIfSmi(x1, &done_restore);
        __ Ldr(d1, FieldMemOperand(x1, HeapNumber::kValueOffset));
        __ Bind(&done_restore);
      }
      __ SmiUntag(x4);
      __ SmiUntag(x0);
    }
    __ AssertNumber(x2);
    // The stub may have produced a smi; re-dispatch on the result kind.
    __ JumpIfSmi(x2, &convert_smi);

    __ Bind(&convert_number);
    __ Ldr(d2, FieldMemOperand(x2, HeapNumber::kValueOffset));
    __ B(&done_convert);

    __ Bind(&convert_smi);
    __ SmiUntagToDouble(d2, x2);
    __ Bind(&done_convert);

    // We can use a single fmin/fmax for the operation itself, but we then need
    // to work out which HeapNumber (or smi) the result came from.
    __ Fmov(x11, d1);
    if (kind == MathMaxMinKind::kMin) {
      __ Fmin(d1, d1, d2);
    } else {
      DCHECK(kind == MathMaxMinKind::kMax);
      __ Fmax(d1, d1, d2);
    }
    __ Fmov(x10, d1);
    // If the bit pattern of the result equals the old accumulator, keep the
    // old tagged value (x1); otherwise take the new parameter (x2).
    __ Cmp(x10, x11);
    __ Csel(x1, x1, x2, eq);
    __ B(&loop);
  }

  __ Bind(&done_loop);
  __ Mov(x0, x1);
  __ Drop(x4);
  __ Ret();
}
230
231// static
// Implements Number(value) when called as a plain function (not as a
// constructor): returns ToNumber of the first argument, or +0 with no args.
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_NumberConstructor");

  // 1. Load the first argument into x0 and get rid of the rest (including the
  // receiver). The post-indexed load pops both the argument and the receiver.
  Label no_arguments;
  {
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    __ Ldr(x0, MemOperand(jssp, 2 * kPointerSize, PostIndex));
  }

  // 2a. Convert first argument to number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0 (already in x0, since argc == 0 is Smi zero).
  __ Bind(&no_arguments);
  __ Drop(1);
  __ Ret();
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000261
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000262
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000263// static
// Implements new Number(value): converts the first argument to a number and
// wraps it in a JSValue. Falls back to the runtime when new.target differs
// from the constructor or allocation fails.
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- x3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_NumberConstructor_ConstructStub");

  // 1. Make sure we operate in the context of the called function.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // 2. Load the first argument into x2 and get rid of the rest (including the
  // receiver). With no arguments the wrapped value defaults to Smi zero.
  {
    Label no_arguments, done;
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    __ Ldr(x2, MemOperand(jssp, 2 * kPointerSize, PostIndex));
    __ B(&done);
    __ Bind(&no_arguments);
    __ Drop(1);
    __ Mov(x2, Smi::FromInt(0));
    __ Bind(&done);
  }

  // 3. Make sure x2 is a number; otherwise call ToNumberStub, preserving the
  // constructor (x1) and new target (x3) across the call.
  {
    Label done_convert;
    __ JumpIfSmi(x2, &done_convert);
    __ JumpIfObjectType(x2, x4, x4, HEAP_NUMBER_TYPE, &done_convert, eq);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(x1, x3);
      __ Move(x0, x2);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Move(x2, x0);
      __ Pop(x3, x1);
    }
    __ Bind(&done_convert);
  }

  // 4. Check if new target and constructor differ; if so, take the generic
  // object-creation path (e.g. for subclass construction).
  Label new_object;
  __ Cmp(x1, x3);
  __ B(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object. The wrapped value (x2)
  // is preserved across the stub call, then stored into the fresh JSValue.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x2);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(x2);
  }
  __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
  __ Ret();
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000331
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000332
333// static
// Implements String(value) when called as a plain function (not as a
// constructor): returns the argument if it is already a string, the
// descriptive string for symbols, ToString otherwise, and "" with no args.
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructor");

  // 1. Load the first argument into x0 and get rid of the rest (including the
  // receiver). The post-indexed load pops both the argument and the receiver.
  Label no_arguments;
  {
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    __ Ldr(x0, MemOperand(jssp, 2 * kPointerSize, PostIndex));
  }

  // 2a. At least one argument, return x0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(x0, &to_string);
    // Instance types above FIRST_NONSTRING_TYPE need ToString; equality
    // means the value is a Symbol (per the STATIC_ASSERT below).
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(x0, x1, x1, FIRST_NONSTRING_TYPE);
    __ B(hi, &to_string);
    __ B(eq, &symbol_descriptive_string);
    __ Ret();
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ Bind(&no_arguments);
  {
    __ LoadRoot(x0, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ Ret();
  }

  // 3a. Convert x0 to a string.
  __ Bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in x0 to a string.
  __ Bind(&symbol_descriptive_string);
  {
    __ Push(x0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}
388
389
390// static
// Implements new String(value): converts the first argument to a string and
// wraps it in a JSValue. Falls back to the runtime when new.target differs
// from the constructor or allocation fails.
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- x3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructor_ConstructStub");

  // 1. Make sure we operate in the context of the called function.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // 2. Load the first argument into x2 and get rid of the rest (including the
  // receiver). With no arguments the wrapped value defaults to "".
  {
    Label no_arguments, done;
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    __ Ldr(x2, MemOperand(jssp, 2 * kPointerSize, PostIndex));
    __ B(&done);
    __ Bind(&no_arguments);
    __ Drop(1);
    __ LoadRoot(x2, Heap::kempty_stringRootIndex);
    __ Bind(&done);
  }

  // 3. Make sure x2 is a string; otherwise call ToStringStub, preserving the
  // constructor (x1) and new target (x3) across the call.
  {
    Label convert, done_convert;
    __ JumpIfSmi(x2, &convert);
    __ JumpIfObjectType(x2, x4, x4, FIRST_NONSTRING_TYPE, &done_convert, lo);
    __ Bind(&convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(x1, x3);
      __ Move(x0, x2);
      __ CallStub(&stub);
      __ Move(x2, x0);
      __ Pop(x3, x1);
    }
    __ Bind(&done_convert);
  }

  // 4. Check if new target and constructor differ; if so, take the generic
  // object-creation path (e.g. for subclass construction).
  Label new_object;
  __ Cmp(x1, x3);
  __ B(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object. The wrapped value (x2)
  // is preserved across the stub call, then stored into the fresh JSValue.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x2);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(x2);
  }
  __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
  __ Ret();
}
459
// Tail-calls the code attached to the SharedFunctionInfo of the function in
// x1. Expects x1 to hold a JSFunction; clobbers x2.
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
  // Skip the Code object header to get the entry address.
  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}
466
// Calls the runtime function |function_id| (which takes the target function
// as its single argument and returns a Code object) and tail-calls the
// returned code. x0, x1 and x3 are preserved for the callee.
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- x0 : argument count (preserved for callee)
  //  -- x1 : target function (preserved for callee)
  //  -- x3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target.
    // Push another copy as a parameter to the runtime call.
    __ SmiTag(x0);
    __ Push(x0, x1, x3, x1);

    __ CallRuntime(function_id, 1);
    __ Move(x2, x0);

    // Restore target function and new target.
    __ Pop(x3, x1, x0);
    __ SmiUntag(x0);
  }

  // Jump past the Code object header to the entry address.
  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}
492
493
// Entry point for functions whose optimization job is queued: optionally
// tries to install already-finished optimized code, otherwise falls through
// to the unoptimized shared code.
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However, not
  // checking may delay installing ready functions, and always checking would be
  // quite expensive. A good compromise is to first check against stack limit as
  // a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
  __ B(hs, &ok);

  // Stack limit hit: ask the runtime for finished optimized code.
  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ Bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
509
510
// Shared body of the [[Construct]] stubs. Builds a CONSTRUCT frame,
// optionally allocates the implicit receiver, copies the arguments, invokes
// the constructor, and applies the ECMA result-selection rules.
//   is_api_function         - invoke via HandleApiCallConstruct.
//   create_implicit_receiver - allocate a receiver (ordinary constructors);
//                              otherwise the hole is pushed instead.
//   check_derived_construct  - throw if a derived constructor returned a
//                              non-object (ES6 9.2.2 step 13).
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- x1     : constructor function
  //  -- x2     : allocation site or undefined
  //  -- x3     : new target
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the four incoming parameters on the stack.
    Register argc = x0;
    Register constructor = x1;
    Register allocation_site = x2;
    Register new_target = x3;

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(allocation_site, x10);
    __ SmiTag(argc);
    __ Push(allocation_site, argc);

    if (create_implicit_receiver) {
      // Allocate the new receiver object. FastNewObjectStub clobbers argc
      // (x0), which is why it is reloaded from the stack below.
      __ Push(constructor, new_target);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Mov(x4, x0);
      __ Pop(new_target, constructor);

      // ----------- S t a t e -------------
      //  -- x1: constructor function
      //  -- x3: new target
      //  -- x4: newly allocated object
      // -----------------------------------

      // Reload the number of arguments from the stack.
      // Set it up in x0 for the function call below.
      // jssp[0]: number of arguments (smi-tagged)
      __ Peek(argc, 0);  // Load number of arguments.
    }

    __ SmiUntag(argc);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(x4, x4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);

    // Copy arguments and receiver to the expression stack.
    // Copy 2 values every loop to use ldp/stp.
    // x0: number of arguments
    // x1: constructor function
    // x2: address of last argument (caller sp)
    // x3: new target
    // jssp[0]: receiver
    // jssp[1]: receiver
    // jssp[2]: number of arguments (smi-tagged)
    // Compute the start address of the copy in x4.
    __ Add(x4, x2, Operand(argc, LSL, kPointerSizeLog2));
    Label loop, entry, done_copying_arguments;
    __ B(&entry);
    __ Bind(&loop);
    __ Ldp(x10, x11, MemOperand(x4, -2 * kPointerSize, PreIndex));
    __ Push(x11, x10);
    __ Bind(&entry);
    __ Cmp(x4, x2);
    __ B(gt, &loop);
    // Because we copied values 2 by 2 we may have copied one extra value.
    // Drop it if that is the case.
    __ B(eq, &done_copying_arguments);
    __ Drop(1);
    __ Bind(&done_copying_arguments);

    // Call the function.
    // x0: number of arguments
    // x1: constructor function
    // x3: new target
    if (is_api_function) {
      __ Ldr(cp, FieldMemOperand(constructor, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(argc);
      __ InvokeFunction(constructor, new_target, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore the context from the frame.
    // x0: result
    // jssp[0]: receiver
    // jssp[1]: number of arguments (smi-tagged)
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // x0: result
      // jssp[0]: receiver (newly allocated object)
      // jssp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(x0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ JumpIfObjectType(x0, x1, x3, FIRST_JS_RECEIVER_TYPE, &exit, ge);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ Bind(&use_receiver);
      __ Peek(x0, 0);

      // Remove the receiver from the stack, remove caller arguments, and
      // return.
      __ Bind(&exit);
      // x0: result
      // jssp[0]: receiver (newly allocated object)
      // jssp[1]: number of arguments (smi-tagged)
      __ Peek(x1, 1 * kXRegSize);
    } else {
      __ Peek(x1, 0);
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(x0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ Bind(&dont_throw);
  }

  // Drop the caller arguments (count is smi-tagged in x1) plus the receiver.
  __ DropBySMI(x1);
  __ Drop(1);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2);
  }
  __ Ret();
}
683
684
685void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100686 Generate_JSConstructStubHelper(masm, false, true, false);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000687}
688
689
690void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100691 Generate_JSConstructStubHelper(masm, true, false, false);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000692}
693
694
695void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100696 Generate_JSConstructStubHelper(masm, false, false, false);
697}
698
699
700void Builtins::Generate_JSBuiltinsConstructStubForDerived(
701 MacroAssembler* masm) {
702 Generate_JSConstructStubHelper(masm, false, false, true);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000703}
704
705
// Called when [[Construct]] is attempted on a non-constructable target
// (in x1): throws a TypeError via the runtime and does not return.
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(x1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
711
712
// Whether an argument count register holds a smi-tagged or a raw integer.
enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Throws a stack-overflow error if pushing |argc| arguments would cross the
// real stack limit; otherwise falls through.
// Clobbers x10, x15; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow.
  // We are not trying to catch interruptions (e.g. debug break and
  // preemption) here, so the "real stack limit" is checked.
  Label enough_stack_space;
  __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
  // Make x10 the space we have left. The stack might already be overflowed
  // here which will cause x10 to become negative.
  // TODO(jbramley): Check that the stack usage here is safe.
  __ Sub(x10, jssp, x10);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ B(gt, &enough_stack_space);
  __ CallRuntime(Runtime::kThrowStackOverflow);
  // We should never return from the APPLY_OVERFLOW builtin.
  if (__ emit_debug_code()) {
    __ Unreachable();
  }

  __ Bind(&enough_stack_space);
}
744
745
// Trampoline from C++ (JSEntryStub) into JavaScript: sets up the context,
// pushes the arguments (dereferencing the handles in argv), initializes the
// JS callee-saved registers, and dispatches to either Construct or Call.
// Input:
//   x0: new.target.
//   x1: function.
//   x2: receiver.
//   x3: argc.
//   x4: argv.
// Output:
//   x0: result.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody().
  Register new_target = x0;
  Register function = x1;
  Register receiver = x2;
  Register argc = x3;
  Register argv = x4;
  Register scratch = x10;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ Mov(cp, 0);

  {
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    __ Mov(scratch, Operand(ExternalReference(Isolate::kContextAddress,
                                              masm->isolate())));
    __ Ldr(cp, MemOperand(scratch));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(function, receiver);

    // Check if we have enough stack space to push all arguments.
    // Takes the argument count in |argc| (x3); clobbers x10, x15.
    Generate_CheckStackOverflow(masm, argc, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop, in reverse order.
    // x3: argc.
    // x4: argv.
    Label loop, entry;
    // Compute the copy end address.
    __ Add(scratch, argv, Operand(argc, LSL, kPointerSizeLog2));

    __ B(&entry);
    __ Bind(&loop);
    __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
    __ Ldr(x12, MemOperand(x11));  // Dereference the handle.
    __ Push(x12);  // Push the argument.
    __ Bind(&entry);
    __ Cmp(scratch, argv);
    __ B(ne, &loop);

    // Swap argc and new.target into the registers expected by Call/Construct.
    __ Mov(scratch, argc);
    __ Mov(argc, new_target);
    __ Mov(new_target, scratch);
    // x0: argc.
    // x3: new.target.

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    // The original values have been saved in JSEntryStub::GenerateBody().
    __ LoadRoot(x19, Heap::kUndefinedValueRootIndex);
    __ Mov(x20, x19);
    __ Mov(x21, x19);
    __ Mov(x22, x19);
    __ Mov(x23, x19);
    __ Mov(x24, x19);
    __ Mov(x25, x19);
    // Don't initialize the reserved registers.
    // x26 : root register (root).
    // x27 : context pointer (cp).
    // x28 : JS stack pointer (jssp).
    // x29 : frame pointer (fp).

    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS internal frame and remove the parameters (except function),
    // and return.
  }

  // Result is in x0. Return.
  __ Ret();
}
837
838
// Entry trampoline for ordinary (non-construct) JS calls: delegates to the
// shared helper with is_construct == false.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
842
843
// Entry trampoline for construct ("new") calls: delegates to the shared
// helper with is_construct == true.
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
847
848
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   - x1: the JS function object being called.
//   - x3: the new target
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ Push(lr, fp, cp, x1);
  __ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp);

  // Get the bytecode array from the function object and load the pointer to the
  // first entry into kInterpreterBytecodeRegister.
  __ Ldr(x0, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  // Temporarily reuse the bytecode-array register to hold the DebugInfo while
  // deciding which bytecode array (original or debug copy) to run.
  Register debug_info = kInterpreterBytecodeArrayRegister;
  Label load_debug_bytecode_array, bytecode_array_loaded;
  DCHECK(!debug_info.is(x0));
  __ Ldr(debug_info, FieldMemOperand(x0, SharedFunctionInfo::kDebugInfoOffset));
  __ Cmp(debug_info, Operand(DebugInfo::uninitialized()));
  __ B(ne, &load_debug_bytecode_array);
  __ Ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset));
  __ Bind(&bytecode_array_loaded);

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
                    kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, x0, x0,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Push new.target, bytecode array and zero for bytecode array offset.
  __ Mov(x0, Operand(0));
  __ Push(x3, kInterpreterBytecodeArrayRegister, x0);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ Ldr(w11, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    DCHECK(jssp.Is(__ StackPointer()));
    __ Sub(x10, jssp, Operand(x11));
    __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
    __ B(hs, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ Bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // Note: there should always be at least one stack slot for the return
    // register in the register file.
    Label loop_header;
    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
    // TODO(rmcilroy): Ensure we always have an even number of registers to
    // allow stack to be 16 bit aligned (and remove need for jssp).
    __ Lsr(x11, x11, kPointerSizeLog2);
    __ PushMultipleTimes(x10, x11);
    __ Bind(&loop_header);
  }

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Code aging of the BytecodeArray object.

  // Load accumulator, register file, bytecode offset, dispatch table into
  // registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ Add(kInterpreterRegisterFileRegister, fp,
         Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ Mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ Mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function: load the opcode
  // byte, scale it to a table index, and call the handler's code object.
  __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
  __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
  // TODO(rmcilroy): Make dispatch table point to code entrys to avoid untagging
  // and header removal.
  __ Add(ip0, ip0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(ip0);

  // Even though the first bytecode handler was called, we will never return.
  __ Abort(kUnexpectedReturnFromBytecodeHandler);

  // Load debug copy of the bytecode array (reached when the function has an
  // initialized DebugInfo), then rejoin the common path above.
  __ Bind(&load_debug_bytecode_array);
  __ Ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ B(&bytecode_array_loaded);
}
960
961
// Tears down an interpreter frame and returns to the caller, dropping the
// receiver and arguments.
void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in accumulator, which is already in x0.

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments and return. The parameter size is read from the
  // bytecode array and is a byte count, hence unit size 1 for Drop.
  __ Ldr(w1, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                             BytecodeArray::kParameterSizeOffset));
  __ Drop(x1, 1);
  __ Ret();
}
980
981
// Re-enters bytecode dispatch for the interpreter frame that is current at fp:
// reloads the interpreter's fixed registers (register file, dispatch table,
// context, bytecode array, bytecode offset) from that frame and jumps to the
// handler for the bytecode at the saved offset. Used when resuming into the
// interpreter from outside (e.g. after a deopt notification).
static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) {
  // Initialize register file register and dispatch table register.
  __ Add(kInterpreterRegisterFileRegister, fp,
         Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ Mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the context from the frame.
  __ Ldr(kContextRegister,
         MemOperand(kInterpreterRegisterFileRegister,
                    InterpreterFrameConstants::kContextFromRegisterPointer));

  // Get the bytecode array pointer from the frame.
  __ Ldr(
      kInterpreterBytecodeArrayRegister,
      MemOperand(kInterpreterRegisterFileRegister,
                 InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
                    kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, x1, x1,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame (stored as a Smi).
  __ Ldr(kInterpreterBytecodeOffsetRegister,
         MemOperand(
             kInterpreterRegisterFileRegister,
             InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode: load the opcode byte, scale it to a
  // dispatch-table index, and jump to the handler code.
  __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
  __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
  __ Add(ip0, ip0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(ip0);
}
1025
1026
// Notifies the runtime of a deoptimization of the given bailout type, restores
// the interpreter's accumulator from the materialized frame, and resumes
// bytecode dispatch in the current interpreter frame.
static void Generate_InterpreterNotifyDeoptimizedHelper(
    MacroAssembler* masm, Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Mov(x1, Operand(Smi::FromInt(static_cast<int>(type))));
    __ Push(x1);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // Drop state (we don't use these for interpreter deopts) and pop the
  // accumulator value into the accumulator register.
  __ Drop(1);
  __ Pop(kInterpreterAccumulatorRegister);

  // Enter the bytecode dispatch.
  Generate_EnterBytecodeDispatch(masm);
}
1048
1049
// Interpreter deopt notification for an eager bailout.
void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
1053
1054
// Interpreter deopt notification for a soft bailout.
void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
1058
1059
// Interpreter deopt notification for a lazy bailout.
void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
1063
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the address of the interpreter entry trampoline as a return address.
  // This simulates the initial call to bytecode handlers in interpreter entry
  // trampoline. The return will never actually be taken, but our stack walker
  // uses this address to determine whether a frame is interpreted.
  __ LoadObject(lr, masm->isolate()->builtins()->InterpreterEntryTrampoline());

  Generate_EnterBytecodeDispatch(masm);
}
1073
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001074
// Lazily compiles the function, then tail-calls into the code the runtime
// returns.
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
1078
1079
// Compiles an optimized version of the function (non-concurrently), then
// tail-calls into the returned code.
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}
1084
1085
// Kicks off concurrent optimized compilation, then tail-calls into the
// returned code.
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
1089
1090
// Common tail for the code-ageing builtins: calls out to the C function that
// resets the calling code object's age, then resumes execution at the address
// the aged prologue passed in x0.
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code fast, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.
  //
  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   x3 - new target
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, x3, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    // Two C arguments: resume address (x0) and isolate (x1).
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
    __ Pop(lr, fp, x3, x1, x0);
  }

  // The calling function has been made young again, so return to execute the
  // real frame set-up code.
  __ Br(x0);
}
1119
// Expands to the even- and odd-marking "make code young again" builtins for
// every code age in CODE_AGE_LIST; all of them share the common helper above.
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
    MacroAssembler* masm) { \
  GenerateMakeCodeYoungAgainCommon(masm); \
} \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
    MacroAssembler* masm) { \
  GenerateMakeCodeYoungAgainCommon(masm); \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1131
1132
// Marks the calling code object as executed once, re-creates the young-code
// prologue that the code-age stub replaced, and resumes past the age sequence.
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.
  //
  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   x3 - new target
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, x3, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    // Two C arguments: resume address (x0) and isolate (x1).
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(
            masm->isolate()), 2);
    __ Pop(lr, fp, x3, x1, x0);

    // Perform prologue operations usually performed by the young code stub.
    __ EmitFrameSetupForCodeAgePatching(masm);
  }

  // Jump to point after the code-age stub.
  __ Add(x0, x0, kNoCodeAgeSequenceLength);
  __ Br(x0);
}
1164
1165
// Marking as executed twice simply makes the code young again.
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
1169
1170
// "To be executed once" shares the executed-once implementation.
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}
1174
1175
// Notifies the runtime of a stub failure while preserving all safepoint
// registers, then jumps to the miss handler whose address the deoptimizer
// placed in lr.
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    // TODO(jbramley): Is it correct (and appropriate) to use safepoint
    // registers here? According to the comment above, we should only need to
    // preserve the registers with parameters.
    __ PushXRegList(kSafepointSavedRegisters);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ PopXRegList(kSafepointSavedRegisters);
  }

  // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
  __ Drop(1);

  // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this
  // into lr before it jumps here.
  __ Br(lr);
}
1200
1201
// Stub-failure notification without saving floating-point registers.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
1205
1206
// Stub-failure notification that also saves floating-point registers.
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
1210
1211
// Full-codegen deopt notification: calls the runtime, then inspects the
// full-codegen state pushed by the deoptimizer to decide whether a TOS value
// must be restored into x0 before returning.
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
    __ Push(x0);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it.
  Register state = x6;
  __ Peek(state, 0);
  __ SmiUntag(state);

  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ CompareAndBranch(
      state, FullCodeGenerator::NO_REGISTERS, ne, &with_tos_register);
  __ Drop(1);  // Remove state.
  __ Ret();

  __ Bind(&with_tos_register);
  // Reload TOS register.
  __ Peek(x0, kPointerSize);
  __ CompareAndBranch(state, FullCodeGenerator::TOS_REG, ne, &unknown_state);
  __ Drop(2);  // Remove state and TOS.
  __ Ret();

  // Any other state value is a bug.
  __ Bind(&unknown_state);
  __ Abort(kInvalidFullCodegenState);
}
1244
1245
// Full-codegen deopt notification for an eager bailout.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
1249
1250
// Full-codegen deopt notification for a lazy bailout.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
1254
1255
// Full-codegen deopt notification for a soft bailout.
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
1259
1260
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001261static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
1262 Register function_template_info,
1263 Register scratch0, Register scratch1,
1264 Register scratch2,
1265 Label* receiver_check_failed) {
1266 Register signature = scratch0;
1267 Register map = scratch1;
1268 Register constructor = scratch2;
1269
1270 // If there is no signature, return the holder.
1271 __ Ldr(signature, FieldMemOperand(function_template_info,
1272 FunctionTemplateInfo::kSignatureOffset));
1273 __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
1274 Label receiver_check_passed;
1275 __ B(eq, &receiver_check_passed);
1276
1277 // Walk the prototype chain.
1278 __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1279 Label prototype_loop_start;
1280 __ Bind(&prototype_loop_start);
1281
1282 // Get the constructor, if any
1283 __ GetMapConstructor(constructor, map, x16, x16);
1284 __ cmp(x16, Operand(JS_FUNCTION_TYPE));
1285 Label next_prototype;
1286 __ B(ne, &next_prototype);
1287 Register type = constructor;
1288 __ Ldr(type,
1289 FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
1290 __ Ldr(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
1291
1292 // Loop through the chain of inheriting function templates.
1293 Label function_template_loop;
1294 __ Bind(&function_template_loop);
1295
1296 // If the signatures match, we have a compatible receiver.
1297 __ Cmp(signature, type);
1298 __ B(eq, &receiver_check_passed);
1299
1300 // If the current type is not a FunctionTemplateInfo, load the next prototype
1301 // in the chain.
1302 __ JumpIfSmi(type, &next_prototype);
1303 __ CompareObjectType(type, x16, x17, FUNCTION_TEMPLATE_INFO_TYPE);
1304 __ B(ne, &next_prototype);
1305
1306 // Otherwise load the parent function template and iterate.
1307 __ Ldr(type,
1308 FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
1309 __ B(&function_template_loop);
1310
1311 // Load the next prototype.
1312 __ Bind(&next_prototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001313 __ Ldr(x16, FieldMemOperand(map, Map::kBitField3Offset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001314 __ Tst(x16, Operand(Map::HasHiddenPrototype::kMask));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001315 __ B(eq, receiver_check_failed);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001316 __ Ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
1317 __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001318 // Iterate.
1319 __ B(&prototype_loop_start);
1320
1321 __ Bind(&receiver_check_passed);
1322}
1323
1324
1325void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
1326 // ----------- S t a t e -------------
1327 // -- x0 : number of arguments excluding receiver
1328 // -- x1 : callee
1329 // -- lr : return address
1330 // -- sp[0] : last argument
1331 // -- ...
1332 // -- sp[8 * (argc - 1)] : first argument
1333 // -- sp[8 * argc] : receiver
1334 // -----------------------------------
1335
1336 // Load the FunctionTemplateInfo.
1337 __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
1338 __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));
1339
1340 // Do the compatible receiver check.
1341 Label receiver_check_failed;
1342 __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
1343 CompatibleReceiverCheck(masm, x2, x3, x4, x5, x6, &receiver_check_failed);
1344
1345 // Get the callback offset from the FunctionTemplateInfo, and jump to the
1346 // beginning of the code.
1347 __ Ldr(x4, FieldMemOperand(x3, FunctionTemplateInfo::kCallCodeOffset));
1348 __ Ldr(x4, FieldMemOperand(x4, CallHandlerInfo::kFastHandlerOffset));
1349 __ Add(x4, x4, Operand(Code::kHeaderSize - kHeapObjectTag));
1350 __ Jump(x4);
1351
1352 // Compatible receiver check failed: throw an Illegal Invocation exception.
1353 __ Bind(&receiver_check_failed);
1354 // Drop the arguments (including the receiver)
1355 __ add(x0, x0, Operand(1));
1356 __ Drop(x0);
1357 __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
1358}
1359
1360
// On-stack replacement: asks the runtime for optimized code for the function
// in the current JavaScript frame and, if available, "returns" into its OSR
// entry point instead of the unoptimized code.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(x0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CompareAndBranch(x0, Smi::FromInt(0), ne, &skip);
  __ Ret();

  __ Bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ Ldrsw(w1, UntagSmiFieldMemOperand(x1, FixedArray::OffsetOfElementAt(
                                               DeoptimizationInputData::kOsrPcOffsetIndex)));

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ Add(x0, x0, x1);
  __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}
1395
1396
// Stack-check variant of OSR entry: if the stack limit has been hit (used as
// a signal that recompilation may be pending), runs the stack guard first,
// then tail-jumps to the OnStackReplacement builtin; otherwise just returns.
void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
  __ B(hs, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ Bind(&ok);
  __ Ret();
}
1412
1413
// static
// Implements the Date.prototype getters: pops the receiver, verifies it is a
// JSDate, and returns the requested field — from the per-object cache when the
// isolate's date-cache stamp still matches, otherwise via the C date-field
// function. Throws a TypeError for non-date receivers.
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- lr      : return address
  //  -- jssp[0] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_DatePrototype_GetField");

  // 1. Pop receiver into x0 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(x0);
    __ JumpIfSmi(x0, &receiver_not_date);
    __ JumpIfNotObjectType(x0, x1, x2, JS_DATE_TYPE, &receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    // The primitive time value needs no caching.
    __ Ldr(x0, FieldMemOperand(x0, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      // Fast path: the cached field is valid while the object's stamp equals
      // the isolate-wide date cache stamp.
      Label stamp_mismatch;
      __ Mov(x1, ExternalReference::date_cache_stamp(masm->isolate()));
      __ Ldr(x1, MemOperand(x1));
      __ Ldr(x2, FieldMemOperand(x0, JSDate::kCacheStampOffset));
      __ Cmp(x1, x2);
      __ B(ne, &stamp_mismatch);
      __ Ldr(x0, FieldMemOperand(
                     x0, JSDate::kValueOffset + field_index * kPointerSize));
      __ Ret();
      __ Bind(&stamp_mismatch);
    }
    // Slow path: call the C function with (date object in x0, field index
    // in x1); the result comes back in x0.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Mov(x1, Smi::FromInt(field_index));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ Bind(&receiver_not_date);
  __ TailCallRuntime(Runtime::kThrowNotDateError);
}
1458
1459
// static
// Implements Function.prototype.apply: normalizes the (receiver, thisArg,
// argArray) stack layout, checks the receiver is callable, and dispatches to
// either the Apply builtin (spreading argArray) or a zero-argument Call.
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0       : argc
  //  -- jssp[0]  : argArray (if argc == 2)
  //  -- jssp[8]  : thisArg  (if argc >= 1)
  //  -- jssp[16] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_FunctionPrototypeApply");

  Register argc = x0;
  Register arg_array = x0;
  Register receiver = x1;
  Register this_arg = x2;
  Register undefined_value = x3;
  Register null_value = x4;

  __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);

  // 1. Load receiver into x1, argArray into x0 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    // Claim (2 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
    __ Claim(2);
    __ Drop(argc);

    // ----------- S t a t e -------------
    //  -- x0       : argc
    //  -- jssp[0]  : argArray (dummy value if argc <= 1)
    //  -- jssp[8]  : thisArg  (dummy value if argc == 0)
    //  -- jssp[16] : receiver
    // -----------------------------------
    __ Cmp(argc, 1);
    __ Pop(arg_array, this_arg);               // Overwrites argc.
    __ CmovX(this_arg, undefined_value, lo);   // undefined if argc == 0.
    __ CmovX(arg_array, undefined_value, ls);  // undefined if argc <= 1.

    __ Peek(receiver, 0);
    __ Poke(this_arg, 0);
  }

  // ----------- S t a t e -------------
  //  -- x0      : argArray
  //  -- x1      : receiver
  //  -- x3      : undefined root value
  //  -- jssp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(receiver, &receiver_not_callable);
  __ Ldr(x10, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Ldrb(w10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable,
                             &receiver_not_callable);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ Cmp(arg_array, null_value);
  __ Ccmp(arg_array, undefined_value, ZFlag, ne);
  __ B(eq, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target in x3).
  DCHECK(undefined_value.Is(x3));
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ Bind(&no_arguments);
  {
    __ Mov(x0, 0);
    DCHECK(receiver.Is(x1));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ Bind(&receiver_not_callable);
  {
    __ Poke(receiver, 0);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1546
1547
// static
// Implements Function.prototype.call: ensures at least one argument exists,
// takes the original receiver as the callable, shifts the remaining arguments
// down one slot so the first argument becomes the new receiver, and tail-calls
// the Call builtin.
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  Register argc = x0;
  Register function = x1;
  Register scratch1 = x10;
  Register scratch2 = x11;

  ASM_LOCATION("Builtins::Generate_FunctionPrototypeCall");

  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ Cbnz(argc, &done);
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Push(scratch1);
    __ Mov(argc, 1);
    __ Bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));

  // 3. Shift arguments and return address one slot down on the stack
  // (overwriting the original receiver). Adjust argument count to make
  // the original first argument the new receiver.
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is jssp.
    __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2));
    __ Sub(scratch1, scratch2, kPointerSize);

    // Copy downwards from [scratch1] to [scratch2] until jssp is reached.
    __ Bind(&loop);
    __ Ldr(x12, MemOperand(scratch1, -kPointerSize, PostIndex));
    __ Str(x12, MemOperand(scratch2, -kPointerSize, PostIndex));
    __ Cmp(scratch1, jssp);
    __ B(ge, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Sub(argc, argc, 1);
    __ Drop(1);
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
1593
1594
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001595void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1596 // ----------- S t a t e -------------
1597 // -- x0 : argc
1598 // -- jssp[0] : argumentsList (if argc == 3)
1599 // -- jssp[8] : thisArgument (if argc >= 2)
1600 // -- jssp[16] : target (if argc >= 1)
1601 // -- jssp[24] : receiver
1602 // -----------------------------------
1603 ASM_LOCATION("Builtins::Generate_ReflectApply");
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001604
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001605 Register argc = x0;
1606 Register arguments_list = x0;
1607 Register target = x1;
1608 Register this_argument = x2;
1609 Register undefined_value = x3;
1610
1611 __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
1612
1613 // 1. Load target into x1 (if present), argumentsList into x0 (if present),
1614 // remove all arguments from the stack (including the receiver), and push
1615 // thisArgument (if present) instead.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001616 {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001617 // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
1618 // consistent state for a simple pop operation.
1619 __ Claim(3);
1620 __ Drop(argc);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001621
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001622 // ----------- S t a t e -------------
1623 // -- x0 : argc
1624 // -- jssp[0] : argumentsList (dummy value if argc <= 2)
1625 // -- jssp[8] : thisArgument (dummy value if argc <= 1)
1626 // -- jssp[16] : target (dummy value if argc == 0)
1627 // -- jssp[24] : receiver
1628 // -----------------------------------
1629 __ Adds(x10, argc, 0); // Preserve argc, and set the Z flag if it is zero.
1630 __ Pop(arguments_list, this_argument, target); // Overwrites argc.
1631 __ CmovX(target, undefined_value, eq); // undefined if argc == 0.
1632 __ Cmp(x10, 2);
1633 __ CmovX(this_argument, undefined_value, lo); // undefined if argc <= 1.
1634 __ CmovX(arguments_list, undefined_value, ls); // undefined if argc <= 2.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001635
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001636 __ Poke(this_argument, 0); // Overwrite receiver.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001637 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001638
1639 // ----------- S t a t e -------------
1640 // -- x0 : argumentsList
1641 // -- x1 : target
1642 // -- jssp[0] : thisArgument
1643 // -----------------------------------
1644
1645 // 2. Make sure the target is actually callable.
1646 Label target_not_callable;
1647 __ JumpIfSmi(target, &target_not_callable);
1648 __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
1649 __ Ldr(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
1650 __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable, &target_not_callable);
1651
1652 // 3a. Apply the target to the given argumentsList (passing undefined for
1653 // new.target in x3).
1654 DCHECK(undefined_value.Is(x3));
1655 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1656
1657 // 3b. The target is not callable, throw an appropriate TypeError.
1658 __ Bind(&target_not_callable);
1659 {
1660 __ Poke(target, 0);
1661 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1662 }
1663}
1664
1665
// Implements Reflect.construct(target, argumentsList[, newTarget]).
// Normalizes the optional arguments, verifies that both target and
// new.target are constructors, then tail-calls the Apply builtin with
// new.target set so that Apply dispatches to Construct.
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0       : argc
  //  -- jssp[0]  : new.target (optional)
  //  -- jssp[8]  : argumentsList
  //  -- jssp[16] : target
  //  -- jssp[24] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ReflectConstruct");

  Register argc = x0;
  Register arguments_list = x0;
  Register target = x1;
  Register new_target = x3;
  Register undefined_value = x4;

  __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);

  // 1. Load target into x1 (if present), argumentsList into x0 (if present),
  // new.target into x3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
    __ Claim(3);
    __ Drop(argc);

    // ----------- S t a t e -------------
    //  -- x0       : argc
    //  -- jssp[0]  : new.target (dummy value if argc <= 2)
    //  -- jssp[8]  : argumentsList (dummy value if argc <= 1)
    //  -- jssp[16] : target (dummy value if argc == 0)
    //  -- jssp[24] : receiver
    // -----------------------------------
    __ Adds(x10, argc, 0);  // Preserve argc, and set the Z flag if it is zero.
    __ Pop(new_target, arguments_list, target);  // Overwrites argc.
    __ CmovX(target, undefined_value, eq);       // undefined if argc == 0.
    __ Cmp(x10, 2);
    __ CmovX(arguments_list, undefined_value, lo);  // undefined if argc <= 1.
    __ CmovX(new_target, target, ls);               // target if argc <= 2.

    // Reflect.construct ignores the receiver; the spec result of the
    // construct is what matters, so plain undefined goes in its slot.
    __ Poke(undefined_value, 0);  // Overwrite receiver.
  }

  // ----------- S t a t e -------------
  //  -- x0      : argumentsList
  //  -- x1      : target
  //  -- x3      : new.target
  //  -- jssp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(target, &target_not_constructor);
  __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
  __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor,
                             &target_not_constructor);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(new_target, &new_target_not_constructor);
  __ Ldr(x10, FieldMemOperand(new_target, HeapObject::kMapOffset));
  __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor,
                             &new_target_not_constructor);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ Bind(&target_not_constructor);
  {
    __ Poke(target, 0);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ Bind(&new_target_not_constructor);
  {
    __ Poke(new_target, 0);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
1751
1752
1753static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
1754 Label* stack_overflow) {
1755 // ----------- S t a t e -------------
1756 // -- x0 : actual number of arguments
1757 // -- x1 : function (passed through to callee)
1758 // -- x2 : expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001759 // -- x3 : new target (passed through to callee)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001760 // -----------------------------------
1761 // Check the stack for overflow.
1762 // We are not trying to catch interruptions (e.g. debug break and
1763 // preemption) here, so the "real stack limit" is checked.
1764 Label enough_stack_space;
1765 __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
1766 // Make x10 the space we have left. The stack might already be overflowed
1767 // here which will cause x10 to become negative.
1768 __ Sub(x10, jssp, x10);
1769 // Check if the arguments will overflow the stack.
1770 __ Cmp(x10, Operand(x2, LSL, kPointerSizeLog2));
1771 __ B(le, stack_overflow);
1772}
1773
1774
// Builds an arguments adaptor frame: saves lr/fp, then pushes the
// ARGUMENTS_ADAPTOR frame marker, the function (x1) and the smi-tagged
// actual argument count (from x0), and points fp into the new frame.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(x10, x0);  // x0 holds the actual argument count.
  __ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ Push(lr, fp);
  __ Push(x11, x1, x10);
  // The extra kPointerSize accounts for the pushed argument count slot,
  // which sits below the standard fixed frame.
  __ Add(fp, jssp,
         StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
}
1783
1784
// Tears down the arguments adaptor frame built by EnterArgumentsAdaptorFrame
// and drops the adapted arguments plus the receiver.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then drop the parameters and the receiver.
  __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                               kPointerSize)));
  __ Mov(jssp, fp);
  __ Pop(fp, lr);
  // Drop the smi-tagged argument count worth of slots...
  __ DropBySMI(x10, kXRegSize);
  // ...plus one more slot for the receiver.
  __ Drop(1);
}
1798
1799
// static
// Shared tail of Function.prototype.apply, Reflect.apply and
// Reflect.construct: materializes argumentsList as a FixedArray, pushes its
// elements onto the stack, and dispatches to Call or Construct.
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0      : argumentsList
  //  -- x1      : target
  //  -- x3      : new.target (checked to be constructor or undefined)
  //  -- jssp[0] : thisArgument
  // -----------------------------------

  Register arguments_list = x0;
  Register target = x1;
  Register new_target = x3;

  // After list creation x0 is reused for the elements array and x2 holds
  // the element count.
  Register args = x0;
  Register len = x2;

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(arguments_list, &create_runtime);

    // Load native context.
    Register native_context = x4;
    __ Ldr(native_context, NativeContextMemOperand());

    // Load the map of argumentsList.
    Register arguments_list_map = x2;
    __ Ldr(arguments_list_map,
           FieldMemOperand(arguments_list, HeapObject::kMapOffset));

    // Check if argumentsList is an (unmodified) arguments object.  An object
    // whose map is still the context's sloppy/strict arguments map can be
    // unpacked directly from its elements.
    __ Ldr(x10, ContextMemOperand(native_context,
                                  Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ Ldr(x11, ContextMemOperand(native_context,
                                  Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ Cmp(arguments_list_map, x10);
    __ Ccmp(arguments_list_map, x11, ZFlag, ne);
    __ B(eq, &create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(arguments_list_map, native_context, JS_ARRAY_TYPE);
    __ B(eq, &create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ Bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(target, new_target, arguments_list);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      // The runtime leaves the FixedArray in x0 (arguments_list).
      __ Pop(new_target, target);
      __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list,
                                            FixedArray::kLengthOffset));
    }
    __ B(&done_create);

    // Try to create the list from an arguments object.
    __ Bind(&create_arguments);
    __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list,
                                          JSArgumentsObject::kLengthOffset));
    __ Ldr(x10, FieldMemOperand(arguments_list, JSObject::kElementsOffset));
    __ Ldrsw(x11, UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
    // If the stated length and the backing store length disagree, the object
    // was modified; fall back to the runtime.
    __ CompareAndBranch(len, x11, ne, &create_runtime);
    __ Mov(args, x10);
    __ B(&done_create);

    // Try to create the list from a JSArray object.
    __ Bind(&create_array);
    __ Ldr(x10, FieldMemOperand(arguments_list_map, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(x10);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    // Branch for anything that's not FAST_{SMI_}ELEMENTS.
    __ TestAndBranchIfAnySet(x10, ~FAST_ELEMENTS, &create_runtime);
    __ Ldrsw(len,
             UntagSmiFieldMemOperand(arguments_list, JSArray::kLengthOffset));
    __ Ldr(args, FieldMemOperand(arguments_list, JSArray::kElementsOffset));

    __ Bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
    // Make x10 the space we have left. The stack might already be overflowed
    // here which will cause x10 to become negative.
    __ Sub(x10, masm->StackPointer(), x10);
    // Check if the arguments will overflow the stack.
    __ Cmp(x10, Operand(len, LSL, kPointerSizeLog2));
    __ B(gt, &done);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ Bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- x0      : args (a FixedArray built from argumentsList)
  //  -- x1      : target
  //  -- x2      : len (number of elements to push from args)
  //  -- x3      : new.target (checked to be constructor or undefined)
  //  -- jssp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label done, loop;
    Register src = x4;

    __ Add(src, args, FixedArray::kHeaderSize - kHeapObjectTag);
    __ Mov(x0, len);  // The 'len' argument for Call() or Construct().
    __ Cbz(len, &done);
    __ Claim(len);
    __ Bind(&loop);
    // Elements are read in ascending order and poked at descending offsets,
    // so args[0] ends up deepest on the stack.
    __ Sub(len, len, 1);
    __ Ldr(x10, MemOperand(src, kPointerSize, PostIndex));
    __ Poke(x10, Operand(len, LSL, kPointerSizeLog2));
    __ Cbnz(len, &loop);
    __ Bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- x0              : argument count (len)
  //  -- x1              : target
  //  -- x3              : new.target (checked to be constructor or undefined)
  //  -- jssp[0]         : args[len-1]
  //  -- jssp[8]         : args[len-2]
  //  -- ...             : ...
  //  -- jssp[8*(len-2)] : args[1]
  //  -- jssp[8*(len-1)] : args[0]
  // -----------------------------------

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(new_target, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
1939
namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if the debugger is not active.  An active
  // debugger needs the intermediate frames to stay around.
  Label done;
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(masm->isolate());
  __ Mov(scratch1, Operand(debug_is_active));
  __ Ldrb(scratch1, MemOperand(scratch1));
  __ Cmp(scratch1, Operand(0));
  __ B(ne, &done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ Ldr(scratch3, MemOperand(fp, StandardFrameConstants::kMarkerOffset));
    __ Cmp(scratch3, Operand(Smi::FromInt(StackFrame::STUB)));
    __ B(ne, &no_interpreter_frame);
    __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ Ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ Ldr(scratch3,
         MemOperand(scratch2, StandardFrameConstants::kContextOffset));
  __ Cmp(scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ B(ne, &no_arguments_adaptor);

  // Drop arguments adaptor frame and load arguments count.
  __ mov(fp, scratch2);
  __ Ldr(scratch1,
         MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(scratch1);
  __ B(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count
  __ Ldr(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(scratch1,
         FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldrsw(scratch1,
           FieldMemOperand(scratch1,
                           SharedFunctionInfo::kFormalParameterCountOffset));
  __ bind(&formal_parameter_count_loaded);

  // Calculate the end of destination area where we will put the arguments
  // after we drop current frame. We add kPointerSize to count the receiver
  // argument which is not included into formal parameters count.
  Register dst_reg = scratch2;
  __ add(dst_reg, fp, Operand(scratch1, LSL, kPointerSizeLog2));
  __ add(dst_reg, dst_reg,
         Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize));

  Register src_reg = scratch1;
  __ add(src_reg, jssp, Operand(args_reg, LSL, kPointerSizeLog2));
  // Count receiver argument as well (not included in args_reg).
  __ add(src_reg, src_reg, Operand(kPointerSize));

  if (FLAG_debug_code) {
    __ Cmp(src_reg, dst_reg);
    __ Check(lo, kStackAccessBelowStackPointer);
  }

  // Restore caller's frame pointer and return address now as they will be
  // overwritten by the copying loop.
  __ Ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
  __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).

  // Both src_reg and dst_reg are pointing to the word after the one to copy,
  // so they must be pre-decremented in the loop.
  Register tmp_reg = scratch3;
  Label loop, entry;
  __ B(&entry);
  __ bind(&loop);
  __ Ldr(tmp_reg, MemOperand(src_reg, -kPointerSize, PreIndex));
  __ Str(tmp_reg, MemOperand(dst_reg, -kPointerSize, PreIndex));
  __ bind(&entry);
  __ Cmp(jssp, src_reg);
  __ B(ne, &loop);

  // Leave current frame.
  __ Mov(jssp, dst_reg);
  __ SetStackPointer(jssp);
  __ AssertStackConsistency();

  __ bind(&done);
}
}  // namespace
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002065
// static
// [[Call]] for a JSFunction: rejects class constructors, converts the
// receiver for sloppy-mode non-native functions (ES6 9.2.1), optionally
// prepares a tail call, then invokes through InvokeFunctionCode.
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  ASM_LOCATION("Builtins::Generate_CallFunction");
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(x1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that function is not a "classConstructor".
  Label class_constructor;
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
  __ TestAndBranchIfAnySet(
      w3, (1 << SharedFunctionInfo::kIsDefaultConstructor) |
              (1 << SharedFunctionInfo::kIsSubclassConstructor) |
              (1 << SharedFunctionInfo::kIsBaseConstructor),
      &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ TestAndBranchIfAnySet(w3,
                           (1 << SharedFunctionInfo::kNative) |
                               (1 << SharedFunctionInfo::kStrictModeFunction),
                           &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- x0 : the number of arguments (not including the receiver)
    //  -- x1 : the function to call (checked to be a JSFunction)
    //  -- x2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(x3);
    } else {
      Label convert_to_object, convert_receiver;
      // Peek the receiver off the stack into x3.
      __ Peek(x3, Operand(x0, LSL, kXRegSizeLog2));
      __ JumpIfSmi(x3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(x3, x4, x4, FIRST_JS_RECEIVER_TYPE);
      // Already a JSReceiver: no conversion needed.
      __ B(hs, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(x3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(x3, Heap::kNullValueRootIndex, &convert_to_object);
        __ Bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(x3);
        }
        __ B(&convert_receiver);
      }
      __ Bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        // Smi-tag and save the argument count and function across the stub
        // call; ToObject may trigger GC.
        __ SmiTag(x0);
        __ Push(x0, x1);
        __ Mov(x0, x3);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ Mov(x3, x0);
        __ Pop(x1, x0);
        __ SmiUntag(x0);
      }
      // Reload the shared function info, clobbered during the stub call.
      __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
      __ Bind(&convert_receiver);
    }
    // Write the converted receiver back into its stack slot.
    __ Poke(x3, Operand(x0, LSL, kXRegSizeLog2));
  }
  __ Bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSFunction)
  //  -- x2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, x0, x3, x4, x5);
  }

  __ Ldrsw(
      x2, FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(x0);
  ParameterCount expected(x2);
  __ InvokeFunctionCode(x1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(x1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
2176
2177
namespace {

// Pushes the [[BoundArguments]] of the JSBoundFunction in x1 onto the stack,
// between the receiver and the explicit call arguments, updating the
// argument count in x0.  Shared by the call and construct paths for bound
// functions.
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : target (checked to be a JSBoundFunction)
  //  -- x3 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into x2 and length of that into x4.
  Label no_bound_arguments;
  __ Ldr(x2, FieldMemOperand(x1, JSBoundFunction::kBoundArgumentsOffset));
  __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
  __ Cmp(x4, 0);
  __ B(eq, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- x0 : the number of arguments (not including the receiver)
    //  -- x1 : target (checked to be a JSBoundFunction)
    //  -- x2 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- x3 : new.target (only in case of [[Construct]])
    //  -- x4 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ Claim(x4);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(jssp, Heap::kRealStackLimitRootIndex);
      __ B(gt, &done);  // Signed comparison.
      // Restore the stack pointer.
      __ Drop(x4);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ Bind(&done);
    }

    // Relocate arguments down the stack.  x5 indexes the destination slot,
    // x4 (offset by the just-claimed space) indexes the source slot.
    {
      Label loop, done_loop;
      __ Mov(x5, 0);
      __ Bind(&loop);
      __ Cmp(x5, x0);
      __ B(gt, &done_loop);
      __ Peek(x10, Operand(x4, LSL, kPointerSizeLog2));
      __ Poke(x10, Operand(x5, LSL, kPointerSizeLog2));
      __ Add(x4, x4, 1);
      __ Add(x5, x5, 1);
      __ B(&loop);
      __ Bind(&done_loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      // Reload the length; x4 was consumed as a cursor above.
      __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
      __ Add(x2, x2, FixedArray::kHeaderSize - kHeapObjectTag);
      __ Bind(&loop);
      __ Sub(x4, x4, 1);
      __ Ldr(x10, MemOperand(x2, x4, LSL, kPointerSizeLog2));
      __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));
      __ Add(x0, x0, 1);  // Each bound argument grows the argument count.
      __ Cmp(x4, 0);
      __ B(gt, &loop);
    }
  }
  __ Bind(&no_bound_arguments);
}

}  // namespace
2254
2255
// static
// [[Call]] for a JSBoundFunction: installs [[BoundThis]] as receiver,
// pushes [[BoundArguments]], then tail-calls the Call builtin on the
// [[BoundTargetFunction]].
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(x1);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, x0, x3, x4, x5);
  }

  // Patch the receiver to [[BoundThis]].
  __ Ldr(x10, FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset));
  __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  // The builtin's code entry is loaded indirectly through the isolate's
  // builtins table and entered past the Code object header.
  __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Mov(x10,
         ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()));
  __ Ldr(x11, MemOperand(x10));
  __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x12);
}
2284
2285
// static
// Generic [[Call]] dispatcher: routes JSFunctions and JSBoundFunctions to
// their specialized builtins, JSProxies to the runtime, other callables to
// the call-as-function delegate, and throws for non-callables.
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(x1, &non_callable);
  __ Bind(&non_smi);
  // x4 = map of the target, x5 = its instance type.
  __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ Ldrb(x4, FieldMemOperand(x4, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x4, 1 << Map::kIsCallable, &non_callable);

  __ Cmp(x5, JS_PROXY_TYPE);
  __ B(ne, &non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, x0, x3, x4, x5);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(x1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ Add(x0, x0, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ Bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, x1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
2344
2345
// static
// [[Construct]] for a JSFunction: tail-calls the function's own construct
// stub taken from its SharedFunctionInfo.
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (checked to be a JSFunction)
  //  -- x3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(x1);

  // Calling convention for function specific ConstructStubs require
  // x2 to contain either an AllocationSite or undefined.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
  // Jump past the Code object header to the stub's first instruction.
  __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x4);
}
2366
2367
// static
// [[Construct]] path for JSBoundFunctions: pushes the bound arguments, then
// re-dispatches Construct on the [[BoundTargetFunction]].
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSBoundFunction)
  //  -- x3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(x1);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  // (Per the spec, `new bound()` behaves as if the bound target itself had
  // been used as new.target; an explicit different new.target is preserved.)
  {
    Label done;
    __ Cmp(x1, x3);
    __ B(ne, &done);
    __ Ldr(x3,
           FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ Bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
  // Load the Construct builtin's Code object through its external reference
  // cell, untag, skip the header and tail-call it.
  __ Mov(x10, ExternalReference(Builtins::kConstruct, masm->isolate()));
  __ Ldr(x11, MemOperand(x10));
  __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x12);
}
2397
2398
// static
// [[Construct]] path for JSProxy objects: forwards to the runtime, which
// implements the Proxy [[Construct]] trap machinery.
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (checked to be a JSProxy)
  //  -- x3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(x1);
  __ Push(x3);
  // Include the pushed new_target, constructor and the receiver in the
  // runtime argument count.
  __ Add(x0, x0, 3);
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
2417
2418
// static
// Generic Construct entry point: dispatches on the target's instance type to
// the appropriate specialized Construct builtin, or raises for
// non-constructables.
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (can be any Object)
  //  -- x3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(x1, &non_constructor);

  // Dispatch based on instance type. Leaves the map in x4 and the instance
  // type in x5 for the checks below.
  __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  __ Ldrb(x2, FieldMemOperand(x4, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x2, 1 << Map::kIsConstructor, &non_constructor);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ Cmp(x5, JS_PROXY_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target, which sits
    // x0 slots down the stack.
    __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, x1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
2468
2469
// static
// Interpreter helper: copies the arguments (laid out in memory starting at
// x2) onto the JS stack and then dispatches through the generic Call builtin.
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- x1 : the target to call (can be any Object).
  // -----------------------------------

  // Find the address of the last argument.
  __ add(x3, x0, Operand(1));  // Add one for receiver.
  __ lsl(x3, x3, kPointerSizeLog2);
  // x4 = one slot below the last argument; the copy loop below stops once
  // the source pointer (x2) walks down to it.
  __ sub(x4, x2, x3);

  // Push the arguments: claim the stack space, then copy slots downwards
  // from the argument buffer to the newly claimed area.
  Label loop_header, loop_check;
  __ Mov(x5, jssp);
  __ Claim(x3, 1);
  __ B(&loop_check);
  __ Bind(&loop_header);
  // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
  __ Ldr(x3, MemOperand(x2, -kPointerSize, PostIndex));
  __ Str(x3, MemOperand(x5, -kPointerSize, PreIndex));
  __ Bind(&loop_check);
  __ Cmp(x2, x4);
  __ B(gt, &loop_header);

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}
2504
2505
// static
// Interpreter helper: pushes a hole for the receiver plus the arguments
// (from the buffer at x2) and tail-calls the generic Construct builtin.
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : argument count (not including receiver)
  //  -- x3 : new target
  //  -- x1 : constructor to call
  //  -- x2 : address of the first argument
  // -----------------------------------

  // Find the address of the last argument.
  __ add(x5, x0, Operand(1));  // Add one for receiver (to be constructed).
  __ lsl(x5, x5, kPointerSizeLog2);

  // Set stack pointer and where to stop.
  __ Mov(x6, jssp);
  __ Claim(x5, 1);
  // x4 = lowest claimed slot; the copy loop stops when the destination
  // pointer (x6) reaches it.
  __ sub(x4, x6, x5);

  // Push a slot for the receiver (initialized to zero; filled in by the
  // construct stub).
  __ Str(xzr, MemOperand(x6, -kPointerSize, PreIndex));

  Label loop_header, loop_check;
  // Push the arguments.
  __ B(&loop_check);
  __ Bind(&loop_header);
  // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
  __ Ldr(x5, MemOperand(x2, -kPointerSize, PostIndex));
  __ Str(x5, MemOperand(x6, -kPointerSize, PreIndex));
  __ Bind(&loop_check);
  __ Cmp(x6, x4);
  __ B(gt, &loop_header);

  // Call the constructor with x0, x1, and x3 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
2541
2542
// Adapts the actual argument count to the callee's expected count: copies
// the arguments into a fresh adaptor frame, dropping extras or padding with
// undefined as needed, then calls the function's code entry.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
  // ----------- S t a t e -------------
  //  -- x0 : actual number of arguments
  //  -- x1 : function (passed through to callee)
  //  -- x2 : expected number of arguments
  //  -- x3 : new target (passed through to callee)
  // -----------------------------------

  Register argc_actual = x0;    // Excluding the receiver.
  Register argc_expected = x2;  // Excluding the receiver.
  Register function = x1;
  Register code_entry = x10;

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Cmp(argc_actual, argc_expected);
  __ B(lt, &too_few);
  // The sentinel count means the callee accepts any number of arguments, so
  // no adaptor frame is needed at all.
  __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
  __ B(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    Register copy_start = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    // scratch2 = expected argument count in bytes.
    __ Lsl(scratch2, argc_expected, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_start, fp, 3 * kPointerSize);
    __ Add(copy_start, copy_start, Operand(argc_actual, LSL, kPointerSizeLog2));
    __ Sub(copy_end, copy_start, scratch2);
    __ Sub(copy_end, copy_end, kPointerSize);
    __ Mov(copy_to, jssp);

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, scratch2, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame,
    // two slots per iteration, working downwards from the highest address.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_start, - 2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_start, copy_end);
    __ B(hi, &copy_2_by_2);

    // Correct the space allocated for the extra slot.
    __ Drop(1);

    __ B(&invoke);
  }

  {  // Too few parameters: Actual < expected
    __ Bind(&too_few);

    Register copy_from = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    // If the function is strong we need to throw an error.
    Label no_strong_error;
    __ Ldr(scratch1,
           FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(scratch2.W(),
           FieldMemOperand(scratch1, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestAndBranchIfAllClear(scratch2.W(),
                               (1 << SharedFunctionInfo::kStrongModeFunction),
                               &no_strong_error);

    // What we really care about is the required number of arguments.
    DCHECK_EQ(kPointerSize, kInt64Size);
    __ Ldr(scratch2.W(),
           FieldMemOperand(scratch1, SharedFunctionInfo::kLengthOffset));
    // The length field is a Smi; LSR #1 untags it before comparing.
    __ Cmp(argc_actual, Operand(scratch2, LSR, 1));
    __ B(ge, &no_strong_error);

    {
      FrameScope frame(masm, StackFrame::MANUAL);
      EnterArgumentsAdaptorFrame(masm);
      __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments);
    }

    __ Bind(&no_strong_error);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // scratch2 / argc_actual become byte counts for the copy below.
    __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
    __ Lsl(argc_actual, argc_actual, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_from, fp, 3 * kPointerSize);
    __ Add(copy_from, copy_from, argc_actual);
    __ Mov(copy_to, jssp);
    __ Sub(copy_end, copy_to, 1 * kPointerSize);   // Adjust for the receiver.
    __ Sub(copy_end, copy_end, argc_actual);

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, scratch2, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_from, - 2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &copy_2_by_2);

    __ Mov(copy_to, copy_end);

    // Fill the remaining expected arguments with undefined.
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Add(copy_end, jssp, kPointerSize);

    Label fill;
    __ Bind(&fill);
    __ Stp(scratch1, scratch1,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &fill);

    // Correct the space allocated for the extra slot.
    __ Drop(1);
  }

  // Arguments have been adapted. Now call the entry point.
  __ Bind(&invoke);
  __ Mov(argc_actual, argc_expected);
  // x0 : expected number of arguments
  // x1 : function (passed through to callee)
  // x3 : new target (passed through to callee)
  __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  __ Call(code_entry);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // Call the entry point without adapting the arguments.
  __ Bind(&dont_adapt_arguments);
  __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  __ Jump(code_entry);

  __ Bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ Unreachable();
  }
}
2711
2712
2713#undef __
2714
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002715} // namespace internal
2716} // namespace v8
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002717
2718#endif // V8_TARGET_ARCH_ARM