blob: b6bae4ad0edc308cfb6d09b3628a05efdadec06f [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_ARM64
6
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007#include "src/arm64/frames-arm64.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008#include "src/codegen.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009#include "src/debug/debug.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000010#include "src/deoptimizer.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000011#include "src/full-codegen/full-codegen.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040012#include "src/runtime/runtime.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000013
14namespace v8 {
15namespace internal {
16
17
18#define __ ACCESS_MASM(masm)
19
20
// Load the built-in Array function from the current context into |result|.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}
26
27
// Load the built-in InternalArray function from the current context into
// |result|.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}
34
35
// Adaptor that pushes the optional extra arguments (target and/or new target),
// fixes up the argument count, and tail-calls the C++ builtin |id|.
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments excluding receiver
  //  -- x1                 : target
  //  -- x3                 : new target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(x1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // Insert extra arguments.
  int num_extra_args = 0;
  switch (extra_args) {
    case BuiltinExtraArguments::kTarget:
      __ Push(x1);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kNewTarget:
      __ Push(x3);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kTargetAndNewTarget:
      __ Push(x1, x3);
      num_extra_args += 2;
      break;
    case BuiltinExtraArguments::kNone:
      break;
  }

  // JumpToExternalReference expects x0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Add(x0, x0, num_extra_args + 1);

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
81
82
// Entry point for the InternalArray function when called as a normal function;
// verifies the initial map in debug builds and tail-calls the stub.
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_InternalArrayCode");
  Label generic_array_code;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps
    // (i.e. non-smi heap objects of MAP_TYPE).
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
109
110
// Entry point for the Array function when called as a normal function;
// verifies the initial map in debug builds and tail-calls the stub.
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ArrayCode");
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps
    // (i.e. non-smi heap objects of MAP_TYPE).
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  // x2 holds the allocation site (undefined here); x3 is set to the Array
  // function itself, which the stub presumably treats as the new target —
  // NOTE(review): confirm against ArrayConstructorStub's calling convention.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
  __ Mov(x3, x1);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
138
139
// static
// Number(value) called as a function (not as a constructor): converts the
// first argument to a number, or returns +0 when called with no arguments.
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_NumberConstructor");

  // 1. Load the first argument into x0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    // Drop all arguments but the first, then pop the first argument and the
    // receiver in one post-indexed load.
    __ Drop(x0);
    __ Ldr(x0, MemOperand(jssp, 2 * kPointerSize, PostIndex));
  }

  // 2a. Convert first argument to number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0 (already in x0, since argc was zero).
  __ Bind(&no_arguments);
  __ Drop(1);
  __ Ret();
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000170
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000171
// static
// new Number(value): wraps the (converted) number in a JSValue. Falls back to
// the runtime when new.target differs from the constructor (subclassing) or
// when inline allocation fails.
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- x3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_NumberConstructor_ConstructStub");

  // 1. Make sure we operate in the context of the called function.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // 2. Load the first argument into x2 and get rid of the rest (including the
  // receiver). With no arguments the wrapped value defaults to smi 0.
  {
    Label no_arguments, done;
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    __ Ldr(x2, MemOperand(jssp, 2 * kPointerSize, PostIndex));
    __ B(&done);
    __ Bind(&no_arguments);
    __ Drop(1);
    __ Mov(x2, Smi::FromInt(0));
    __ Bind(&done);
  }

  // 3. Make sure x2 is a number; call ToNumber otherwise, preserving the
  // constructor (x1) and new target (x3) across the call.
  {
    Label done_convert;
    __ JumpIfSmi(x2, &done_convert);
    __ JumpIfObjectType(x2, x4, x4, HEAP_NUMBER_TYPE, &done_convert, eq);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(x1, x3);
      __ Move(x0, x2);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Move(x2, x0);
      __ Pop(x3, x1);
    }
    __ Bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ Cmp(x1, x3);
  __ B(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x2, x1, x3);  // first argument, constructor, new target
    __ CallRuntime(Runtime::kNewObject);
    __ Pop(x2);
  }
  // Store the wrapped value into the freshly created JSValue.
  __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
  __ Ret();
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000239
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000240
241// static
242void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
243 // ----------- S t a t e -------------
244 // -- x0 : number of arguments
245 // -- x1 : constructor function
246 // -- lr : return address
247 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
248 // -- sp[argc * 8] : receiver
249 // -----------------------------------
250 ASM_LOCATION("Builtins::Generate_StringConstructor");
251
252 // 1. Load the first argument into x0 and get rid of the rest (including the
253 // receiver).
254 Label no_arguments;
255 {
256 __ Cbz(x0, &no_arguments);
257 __ Sub(x0, x0, 1);
258 __ Drop(x0);
259 __ Ldr(x0, MemOperand(jssp, 2 * kPointerSize, PostIndex));
260 }
261
262 // 2a. At least one argument, return x0 if it's a string, otherwise
263 // dispatch to appropriate conversion.
264 Label to_string, symbol_descriptive_string;
265 {
266 __ JumpIfSmi(x0, &to_string);
267 STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
268 __ CompareObjectType(x0, x1, x1, FIRST_NONSTRING_TYPE);
269 __ B(hi, &to_string);
270 __ B(eq, &symbol_descriptive_string);
271 __ Ret();
272 }
273
274 // 2b. No arguments, return the empty string (and pop the receiver).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000275 __ Bind(&no_arguments);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000276 {
277 __ LoadRoot(x0, Heap::kempty_stringRootIndex);
278 __ Drop(1);
279 __ Ret();
280 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000281
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000282 // 3a. Convert x0 to a string.
283 __ Bind(&to_string);
284 {
285 ToStringStub stub(masm->isolate());
286 __ TailCallStub(&stub);
287 }
288
289 // 3b. Convert symbol in x0 to a string.
290 __ Bind(&symbol_descriptive_string);
291 {
292 __ Push(x0);
293 __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
294 }
295}
296
297
// static
// new String(value): wraps the (converted) string in a JSValue. Falls back to
// the runtime when new.target differs from the constructor (subclassing) or
// when inline allocation fails.
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- x3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructor_ConstructStub");

  // 1. Make sure we operate in the context of the called function.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // 2. Load the first argument into x2 and get rid of the rest (including the
  // receiver). With no arguments the wrapped value defaults to "".
  {
    Label no_arguments, done;
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    __ Ldr(x2, MemOperand(jssp, 2 * kPointerSize, PostIndex));
    __ B(&done);
    __ Bind(&no_arguments);
    __ Drop(1);
    __ LoadRoot(x2, Heap::kempty_stringRootIndex);
    __ Bind(&done);
  }

  // 3. Make sure x2 is a string; call ToString otherwise, preserving the
  // constructor (x1) and new target (x3) across the call.
  {
    Label convert, done_convert;
    __ JumpIfSmi(x2, &convert);
    __ JumpIfObjectType(x2, x4, x4, FIRST_NONSTRING_TYPE, &done_convert, lo);
    __ Bind(&convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(x1, x3);
      __ Move(x0, x2);
      __ CallStub(&stub);
      __ Move(x2, x0);
      __ Pop(x3, x1);
    }
    __ Bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ Cmp(x1, x3);
  __ B(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x2, x1, x3);  // first argument, constructor, new target
    __ CallRuntime(Runtime::kNewObject);
    __ Pop(x2);
  }
  // Store the wrapped value into the freshly created JSValue.
  __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
  __ Ret();
}
366
367
// Calls the runtime function |function_id| with the target function (x1) as
// its single argument, preserving x1 and the new target (x3) for the caller.
static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- x1 : target function (preserved for callee)
  //  -- x3 : new target (preserved for callee)
  // -----------------------------------

  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the target function and the new target.
  // Push another copy as a parameter to the runtime call.
  __ Push(x1, x3, x1);

  __ CallRuntime(function_id, 1);

  // Restore target function and new target.
  __ Pop(x3, x1);
}
385
386
// Tail-calls the code attached to the SharedFunctionInfo of the function in
// x1 (entry point skips the Code object header).
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}
393
394
// Tail-calls the Code object returned in x0 by a preceding runtime call
// (entry point skips the Code object header).
static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ Add(x0, x0, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x0);
}
399
400
// Entry point for functions whose optimized code is still being compiled:
// opportunistically tries to install finished optimized code, otherwise runs
// the unoptimized shared code.
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However, not
  // checking may delay installing ready functions, and always checking would be
  // quite expensive. A good compromise is to first check against stack limit as
  // a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
  __ B(hs, &ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ Bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
417
418
// Shared body of the [[Construct]] stubs: optionally allocates the implicit
// receiver (inline fast path with slack tracking, runtime slow path), copies
// the caller's arguments into the construct frame, invokes the constructor,
// and applies the ECMA rule for choosing between the returned value and the
// receiver. |is_api_function| selects the API-call path; when
// |create_implicit_receiver| is false the hole is pushed instead (builtin
// subclass constructors).
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- x1     : constructor function
  //  -- x2     : allocation site or undefined
  //  -- x3     : new target
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    Register argc = x0;
    Register constructor = x1;
    Register allocation_site = x2;
    Register new_target = x3;

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(allocation_site, x10);
    __ SmiTag(argc);
    __ Push(allocation_site, argc);

    if (create_implicit_receiver) {
      // Try to allocate the object without transitioning into C code. If any of
      // the preconditions is not met, the code bails out to the runtime call.
      Label rt_call, allocated;
      if (FLAG_inline_new) {
        // Verify that the new target is a JSFunction.
        __ JumpIfNotObjectType(new_target, x10, x11, JS_FUNCTION_TYPE,
                               &rt_call);

        // Load the initial map and verify that it is in fact a map.
        Register init_map = x2;
        __ Ldr(init_map,
               FieldMemOperand(new_target,
                               JSFunction::kPrototypeOrInitialMapOffset));
        __ JumpIfSmi(init_map, &rt_call);
        __ JumpIfNotObjectType(init_map, x10, x11, MAP_TYPE, &rt_call);

        // Fall back to runtime if the expected base constructor and base
        // constructor differ.
        __ Ldr(x10,
               FieldMemOperand(init_map, Map::kConstructorOrBackPointerOffset));
        __ Cmp(constructor, x10);
        __ B(ne, &rt_call);

        // Check that the constructor is not constructing a JSFunction (see
        // comments in Runtime_NewObject in runtime.cc). In which case the
        // initial map's instance type would be JS_FUNCTION_TYPE.
        __ CompareInstanceType(init_map, x10, JS_FUNCTION_TYPE);
        __ B(eq, &rt_call);

        // Now allocate the JSObject on the heap.
        Register obj_size = x10;
        Register new_obj = x4;
        Register next_obj = obj_size;  // May overlap.
        __ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset));
        __ Allocate(obj_size, new_obj, next_obj, x11, &rt_call, SIZE_IN_WORDS);

        // Allocated the JSObject, now initialize the fields. Map is set to
        // initial map and properties and elements are set to empty fixed array.
        // NB. the object pointer is not tagged, so MemOperand is used.
        Register write_address = x5;
        Register empty = x7;
        __ Mov(write_address, new_obj);
        __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex);
        STATIC_ASSERT(0 * kPointerSize == JSObject::kMapOffset);
        __ Str(init_map, MemOperand(write_address, kPointerSize, PostIndex));
        STATIC_ASSERT(1 * kPointerSize == JSObject::kPropertiesOffset);
        STATIC_ASSERT(2 * kPointerSize == JSObject::kElementsOffset);
        __ Stp(empty, empty,
               MemOperand(write_address, 2 * kPointerSize, PostIndex));
        STATIC_ASSERT(3 * kPointerSize == JSObject::kHeaderSize);

        // Add the object tag to make the JSObject real, so that we can continue
        // and jump into the continuation code at any time from now on.
        __ Add(new_obj, new_obj, kHeapObjectTag);

        // Fill all of the in-object properties with the appropriate filler.
        Register filler = x7;
        __ LoadRoot(filler, Heap::kUndefinedValueRootIndex);

        if (!is_api_function) {
          Label no_inobject_slack_tracking;

          Register constructon_count = x14;
          MemOperand bit_field3 =
              FieldMemOperand(init_map, Map::kBitField3Offset);
          // Check if slack tracking is enabled.
          __ Ldr(x11, bit_field3);
          __ DecodeField<Map::ConstructionCounter>(constructon_count, x11);
          __ Cmp(constructon_count, Operand(Map::kSlackTrackingCounterEnd));
          __ B(lt, &no_inobject_slack_tracking);
          // Decrease generous allocation count.
          __ Subs(x11, x11, Operand(1 << Map::ConstructionCounter::kShift));
          __ Str(x11, bit_field3);

          // Allocate object with a slack: only the used prefix of the
          // in-object properties is filled with undefined; the tail gets a
          // one-pointer filler so the heap stays iterable.
          Register unused_props = x11;
          __ Ldr(unused_props,
                 FieldMemOperand(init_map, Map::kInstanceAttributesOffset));
          __ Ubfx(unused_props, unused_props,
                  Map::kUnusedPropertyFieldsByte * kBitsPerByte, kBitsPerByte);

          Register end_of_pre_allocated = x11;
          __ Sub(end_of_pre_allocated, next_obj,
                 Operand(unused_props, LSL, kPointerSizeLog2));
          unused_props = NoReg;

          if (FLAG_debug_code) {
            __ Cmp(write_address, end_of_pre_allocated);
            __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
          }

          // Fill the pre-allocated fields with undef.
          __ InitializeFieldsWithFiller(write_address, end_of_pre_allocated,
                                        filler);

          // Fill the remaining fields with one pointer filler map.
          __ LoadRoot(filler, Heap::kOnePointerFillerMapRootIndex);
          __ InitializeFieldsWithFiller(write_address, next_obj, filler);

          // If the counter just reached the end, finalize the instance size.
          __ Cmp(constructon_count, Operand(Map::kSlackTrackingCounterEnd));
          __ B(ne, &allocated);

          // Push the constructor, new_target and the object to the stack,
          // and then the initial map as an argument to the runtime call.
          __ Push(constructor, new_target, new_obj, init_map);
          __ CallRuntime(Runtime::kFinalizeInstanceSize);
          __ Pop(new_obj, new_target, constructor);

          // Continue with JSObject being successfully allocated.
          __ B(&allocated);

          __ bind(&no_inobject_slack_tracking);
        }

        __ InitializeFieldsWithFiller(write_address, next_obj, filler);

        // Continue with JSObject being successfully allocated.
        __ B(&allocated);
      }

      // Allocate the new receiver object using the runtime call.
      // x1: constructor function
      // x3: new target
      __ Bind(&rt_call);

      // Push the constructor and new_target twice, second pair as arguments
      // to the runtime call.
      __ Push(constructor, new_target, constructor, new_target);
      __ CallRuntime(Runtime::kNewObject);
      __ Mov(x4, x0);
      __ Pop(new_target, constructor);

      // Receiver for constructor call allocated.
      // x1: constructor function
      // x3: new target
      // x4: JSObject
      __ Bind(&allocated);

      // Reload the number of arguments from the stack.
      // Set it up in x0 for the function call below.
      // jssp[0]: number of arguments (smi-tagged)
      __ Peek(argc, 0);  // Load number of arguments.
    }

    __ SmiUntag(argc);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(x4, x4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);

    // Copy arguments and receiver to the expression stack.
    // Copy 2 values every loop to use ldp/stp.
    // x0: number of arguments
    // x1: constructor function
    // x2: address of last argument (caller sp)
    // x3: new target
    // jssp[0]: receiver
    // jssp[1]: receiver
    // jssp[2]: number of arguments (smi-tagged)
    // Compute the start address of the copy in x4.
    __ Add(x4, x2, Operand(argc, LSL, kPointerSizeLog2));
    Label loop, entry, done_copying_arguments;
    __ B(&entry);
    __ Bind(&loop);
    __ Ldp(x10, x11, MemOperand(x4, -2 * kPointerSize, PreIndex));
    __ Push(x11, x10);
    __ Bind(&entry);
    __ Cmp(x4, x2);
    __ B(gt, &loop);
    // Because we copied values 2 by 2 we may have copied one extra value.
    // Drop it if that is the case.
    __ B(eq, &done_copying_arguments);
    __ Drop(1);
    __ Bind(&done_copying_arguments);

    // Call the function.
    // x0: number of arguments
    // x1: constructor function
    // x3: new target
    if (is_api_function) {
      __ Ldr(cp, FieldMemOperand(constructor, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(argc);
      __ InvokeFunction(constructor, new_target, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore the context from the frame.
    // x0: result
    // jssp[0]: receiver
    // jssp[1]: number of arguments (smi-tagged)
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // x0: result
      // jssp[0]: receiver (newly allocated object)
      // jssp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(x0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ JumpIfObjectType(x0, x1, x3, FIRST_JS_RECEIVER_TYPE, &exit, ge);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ Bind(&use_receiver);
      __ Peek(x0, 0);

      // Remove the receiver from the stack, remove caller arguments, and
      // return.
      __ Bind(&exit);
      // x0: result
      // jssp[0]: receiver (newly allocated object)
      // jssp[1]: number of arguments (smi-tagged)
      __ Peek(x1, 1 * kXRegSize);
    } else {
      __ Peek(x1, 0);
    }

    // Leave construct frame.
  }

  // Drop the smi-tagged argument count plus the receiver slot, then return.
  __ DropBySMI(x1);
  __ Drop(1);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2);
  }
  __ Ret();
}
707
708
// Generic construct stub: not an API call, creates an implicit receiver.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}
712
713
// Construct stub for API functions: creates an implicit receiver.
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, true);
}
717
718
// Construct stub for builtins: no implicit receiver (the hole is pushed).
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}
722
723
// Throws a TypeError when a non-constructable target (in x1) is [[Construct]]ed.
// Does not return.
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(x1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
729
730
// Whether an argument-count register holds a smi-tagged or a raw integer.
enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
732
733
// Bails out to Runtime::kThrowStackOverflow if pushing |argc| pointers would
// cross the real stack limit; otherwise falls through.
// Clobbers x10, x15; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow.
  // We are not trying to catch interruptions (e.g. debug break and
  // preemption) here, so the "real stack limit" is checked.
  Label enough_stack_space;
  __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
  // Make x10 the space we have left. The stack might already be overflowed
  // here which will cause x10 to become negative.
  // TODO(jbramley): Check that the stack usage here is safe.
  __ Sub(x10, jssp, x10);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ B(gt, &enough_stack_space);
  __ CallRuntime(Runtime::kThrowStackOverflow);
  // We should never return from the APPLY_OVERFLOW builtin.
  if (__ emit_debug_code()) {
    __ Unreachable();
  }

  __ Bind(&enough_stack_space);
}
762
763
// Input:
//   x0: new.target.
//   x1: function.
//   x2: receiver.
//   x3: argc.
//   x4: argv.
// Output:
//   x0: result.
//
// Sets up an internal frame, copies the C-side argument array onto the JS
// stack, and tail-enters either the Call or Construct builtin.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody().
  Register new_target = x0;
  Register function = x1;
  Register receiver = x2;
  Register argc = x3;
  Register argv = x4;
  Register scratch = x10;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ Mov(cp, 0);

  {
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    __ Mov(scratch, Operand(ExternalReference(Isolate::kContextAddress,
                                              masm->isolate())));
    __ Ldr(cp, MemOperand(scratch));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(function, receiver);

    // Check if we have enough stack space to push all arguments.
    // Argument count is untagged in x3 (argc); clobbers x10 and x15 (see
    // Generate_CheckStackOverflow).
    Generate_CheckStackOverflow(masm, argc, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop, in reverse order.
    // x3: argc.
    // x4: argv.  Each argv slot is a Handle, so it is dereferenced once more
    // before being pushed.
    Label loop, entry;
    // Compute the copy end address.
    __ Add(scratch, argv, Operand(argc, LSL, kPointerSizeLog2));

    __ B(&entry);
    __ Bind(&loop);
    __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
    __ Ldr(x12, MemOperand(x11));  // Dereference the handle.
    __ Push(x12);  // Push the argument.
    __ Bind(&entry);
    __ Cmp(scratch, argv);
    __ B(ne, &loop);

    // Swap argc and new.target into the registers the Call/Construct builtins
    // expect (x0 = argc, x3 = new.target).
    __ Mov(scratch, argc);
    __ Mov(argc, new_target);
    __ Mov(new_target, scratch);
    // x0: argc.
    // x3: new.target.

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    // The original values have been saved in JSEntryStub::GenerateBody().
    __ LoadRoot(x19, Heap::kUndefinedValueRootIndex);
    __ Mov(x20, x19);
    __ Mov(x21, x19);
    __ Mov(x22, x19);
    __ Mov(x23, x19);
    __ Mov(x24, x19);
    __ Mov(x25, x19);
    // Don't initialize the reserved registers.
    // x26 : root register (root).
    // x27 : context pointer (cp).
    // x28 : JS stack pointer (jssp).
    // x29 : frame pointer (fp).

    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS internal frame and remove the parameters (except function),
    // and return.
  }

  // Result is in x0. Return.
  __ Ret();
}
855
856
// JS entry trampoline for ordinary calls (dispatches to the Call builtin).
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
860
861
// JS entry trampoline for construct calls (dispatches to the Construct
// builtin).
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
865
866
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   - x1: the JS function object being called.
//   - x3: the new target
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm64.h for its layout.
// TODO(rmcilroy): We will need to include the current bytecode pointer in the
// frame.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  // Build the frame by hand: return address, caller fp, context, function.
  __ Push(lr, fp, cp, x1);
  __ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp);
  // Push the new target (x3).
  __ Push(x3);

  // Push zero for bytecode array offset.
  __ Mov(x0, Operand(0));
  __ Push(x0);

  // Get the bytecode array from the function object and load the pointer to the
  // first entry into kInterpreterBytecodeRegister.
  __ Ldr(x0, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
                    kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, x0, x0,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ Ldr(w11, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    DCHECK(jssp.Is(__ StackPointer()));
    __ Sub(x10, jssp, Operand(x11));
    __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
    __ B(hs, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ Bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // Note: there should always be at least one stack slot for the return
    // register in the register file.
    Label loop_header;
    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
    // TODO(rmcilroy): Ensure we always have an even number of registers to
    // allow stack to be 16 bit aligned (and remove need for jssp).
    // Convert the frame size in bytes (w11) to a slot count.
    __ Lsr(x11, x11, kPointerSizeLog2);
    __ PushMultipleTimes(x10, x11);
    __ Bind(&loop_header);
  }

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Support profiler (specifically profiling_counter).
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Allow simulator stop operations if FLAG_stop_at is set.
  //  - Code aging of the BytecodeArray object.

  // Perform stack guard check.
  {
    Label ok;
    __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
    __ B(hs, &ok);
    // Preserve the bytecode array across the runtime call (the GC may move
    // its container otherwise).
    __ Push(kInterpreterBytecodeArrayRegister);
    __ CallRuntime(Runtime::kStackGuard);
    __ Pop(kInterpreterBytecodeArrayRegister);
    __ Bind(&ok);
  }

  // Load accumulator, register file, bytecode offset, dispatch table into
  // registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ Add(kInterpreterRegisterFileRegister, fp,
         Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ Mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ Add(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
         Operand(FixedArray::kHeaderSize - kHeapObjectTag));

  // Dispatch to the first bytecode handler for the function: load the first
  // bytecode, scale it to a table index, and call the handler's code entry.
  __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
  __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
  // TODO(rmcilroy): Make dispatch table point to code entrys to avoid untagging
  // and header removal.
  __ Add(ip0, ip0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(ip0);
}
979
980
// Tears down an interpreter frame and returns to the caller, dropping the
// receiver and arguments.  The return value is expected in the accumulator.
void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in accumulator, which is already in x0.

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments and return.  The parameter size (in bytes) is
  // read from the BytecodeArray; Drop(x1, 1) additionally drops the receiver.
  __ Ldr(w1, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                             BytecodeArray::kParameterSizeOffset));
  __ Drop(x1, 1);
  __ Ret();
}
999
1000
// Notifies the runtime of a deoptimization that landed in interpreted code,
// then re-establishes the interpreter's fixed registers from the frame and
// re-dispatches to the bytecode at the recorded offset.
static void Generate_InterpreterNotifyDeoptimizedHelper(
    MacroAssembler* masm, Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(kInterpreterAccumulatorRegister);  // Save accumulator register.

    // Pass the deoptimization type to the runtime system.
    __ Mov(x1, Operand(Smi::FromInt(static_cast<int>(type))));
    __ Push(x1);
    __ CallRuntime(Runtime::kNotifyDeoptimized);

    __ Pop(kInterpreterAccumulatorRegister);  // Restore accumulator register.
    // Tear down internal frame.
  }

  // Drop state (we don't use this for interpreter deopts).
  __ Drop(1);

  // Initialize register file register and dispatch table register.
  __ Add(kInterpreterRegisterFileRegister, fp,
         Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ Add(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
         Operand(FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the context from the frame.
  // TODO(rmcilroy): Update interpreter frame to expect current context at the
  // context slot instead of the function context.
  __ Ldr(kContextRegister,
         MemOperand(kInterpreterRegisterFileRegister,
                    InterpreterFrameConstants::kContextFromRegisterPointer));

  // Get the bytecode array pointer from the frame (via the function's
  // SharedFunctionInfo).
  __ Ldr(x1,
         MemOperand(kInterpreterRegisterFileRegister,
                    InterpreterFrameConstants::kFunctionFromRegisterPointer));
  __ Ldr(x1, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(x1, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
                    kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, x1, x1,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame (stored as a smi).
  __ Ldr(kInterpreterBytecodeOffsetRegister,
         MemOperand(
             kInterpreterRegisterFileRegister,
             InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode: load it, scale to a dispatch-table
  // index, and jump (not call) to the handler's code entry.
  __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
  __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
  __ Add(ip0, ip0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(ip0);
}
1067
1068
// Eager-deopt notification entry for interpreted frames.
void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
1072
1073
// Soft-deopt notification entry for interpreted frames.
void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
1077
1078
// Lazy-deopt notification entry for interpreted frames.
void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
1082
1083
// Lazily compiles the function (passed implicitly via the frame) through the
// runtime, then tail-calls the code object the runtime returned.
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}
1088
1089
// Triggers non-concurrent optimized compilation via the runtime and
// tail-calls the resulting code.
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent);
  GenerateTailCallToReturnedCode(masm);
}
1094
1095
// Triggers concurrent (background) optimized compilation via the runtime and
// tail-calls the code returned for immediate execution.
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileOptimized_Concurrent);
  GenerateTailCallToReturnedCode(masm);
}
1100
1101
// Shared body for all Make*CodeYoungAgain* builtins: calls the C function
// that resets the code's age, then resumes execution at the address in x0.
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code fast, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   x3 - new target
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, x3, fp, lr);
    // C calling convention: x0 = resume address (already set), x1 = isolate.
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
    __ Pop(lr, fp, x3, x1, x0);
  }

  // The calling function has been made young again, so return to execute the
  // real frame set-up code.
  __ Br(x0);
}
1130
// For every code age C in CODE_AGE_LIST, emit the even- and odd-marking
// "make young again" builtins; both delegate to the common helper above.
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
    MacroAssembler* masm) { \
  GenerateMakeCodeYoungAgainCommon(masm); \
} \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
    MacroAssembler* masm) { \
  GenerateMakeCodeYoungAgainCommon(masm); \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1142
1143
// Marks the calling code object as executed once, re-emits the prologue the
// code-age stub replaced, and resumes execution just past the age sequence.
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   x3 - new target
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, x3, fp, lr);
    // C calling convention: x0 = resume address (already set), x1 = isolate.
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(
            masm->isolate()), 2);
    __ Pop(lr, fp, x3, x1, x0);

    // Perform prologue operations usually performed by the young code stub.
    __ EmitFrameSetupForCodeAgePatching(masm);
  }

  // Jump to point after the code-age stub.
  __ Add(x0, x0, kNoCodeAgeSequenceLength);
  __ Br(x0);
}
1175
1176
// "Executed twice" marking reuses the generic make-young path.
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
1180
1181
// "To be executed once" marking is implemented identically to the
// executed-once builtin above.
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}
1185
1186
// Notifies the runtime that a stub failed (deopted), preserving all safepoint
// registers across the call, then resumes at the miss handler address that
// the deoptimizer placed in lr.
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    // TODO(jbramley): Is it correct (and appropriate) to use safepoint
    // registers here? According to the comment above, we should only need to
    // preserve the registers with parameters.
    __ PushXRegList(kSafepointSavedRegisters);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ PopXRegList(kSafepointSavedRegisters);
  }

  // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
  __ Drop(1);

  // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this
  // into lr before it jumps here.
  __ Br(lr);
}
1211
1212
// Stub-failure notification without saving floating-point registers.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
1216
1217
// Stub-failure notification that additionally saves floating-point registers.
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
1221
1222
// Notifies the runtime of a deoptimization of full-codegen code, then
// resumes based on the full-codegen state the deoptimizer pushed:
// NO_REGISTERS -> just drop the state; TOS_REG -> also restore the
// top-of-stack value into x0 before returning.
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
    __ Push(x0);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it.
  Register state = x6;
  __ Peek(state, 0);
  __ SmiUntag(state);

  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ CompareAndBranch(
      state, FullCodeGenerator::NO_REGISTERS, ne, &with_tos_register);
  __ Drop(1);  // Remove state.
  __ Ret();

  __ Bind(&with_tos_register);
  // Reload TOS register.
  __ Peek(x0, kPointerSize);
  __ CompareAndBranch(state, FullCodeGenerator::TOS_REG, ne, &unknown_state);
  __ Drop(2);  // Remove state and TOS.
  __ Ret();

  // Any other state value is a bug.
  __ Bind(&unknown_state);
  __ Abort(kInvalidFullCodegenState);
}
1255
1256
// Eager-deopt notification entry for full-codegen frames.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
1260
1261
// Lazy-deopt notification entry for full-codegen frames.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
1265
1266
// Soft-deopt notification entry for full-codegen frames.
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
1270
1271
// Checks that |receiver| is compatible with the signature of
// |function_template_info|, walking the receiver's (hidden) prototype chain
// and each constructor's FunctionTemplateInfo parent chain.  Falls through on
// success; branches to |receiver_check_failed| otherwise.  Clobbers the three
// scratch registers plus x16/x17; may advance |receiver| up the prototype
// chain.
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Register scratch0, Register scratch1,
                                    Register scratch2,
                                    Label* receiver_check_failed) {
  Register signature = scratch0;
  Register map = scratch1;
  Register constructor = scratch2;

  // If there is no signature, return the holder.
  __ Ldr(signature, FieldMemOperand(function_template_info,
                                    FunctionTemplateInfo::kSignatureOffset));
  __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
  Label receiver_check_passed;
  __ B(eq, &receiver_check_passed);

  // Walk the prototype chain.
  __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ Bind(&prototype_loop_start);

  // Get the constructor, if any
  __ GetMapConstructor(constructor, map, x16, x16);
  __ cmp(x16, Operand(JS_FUNCTION_TYPE));
  Label next_prototype;
  __ B(ne, &next_prototype);
  // Load the constructor's FunctionTemplateInfo (via SharedFunctionInfo).
  Register type = constructor;
  __ Ldr(type,
         FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ Bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ Cmp(signature, type);
  __ B(eq, &receiver_check_passed);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ CompareObjectType(type, x16, x17, FUNCTION_TEMPLATE_INFO_TYPE);
  __ B(ne, &next_prototype);

  // Otherwise load the parent function template and iterate.
  __ Ldr(type,
         FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ B(&function_template_loop);

  // Load the next prototype.
  __ Bind(&next_prototype);
  __ Ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  // End if the prototype is null or not hidden.
  __ CompareRoot(receiver, Heap::kNullValueRootIndex);
  __ B(eq, receiver_check_failed);
  __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Ldr(x16, FieldMemOperand(map, Map::kBitField3Offset));
  __ Tst(x16, Operand(Map::IsHiddenPrototype::kMask));
  __ B(eq, receiver_check_failed);
  // Iterate.
  __ B(&prototype_loop_start);

  __ Bind(&receiver_check_passed);
}
1337
1338
// Fast path for calling an API function: verifies the receiver against the
// callee's FunctionTemplateInfo signature, then jumps directly to the fast
// handler code; on failure, drops the arguments and throws.
void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments excluding receiver
  //  -- x1                 : callee
  //  -- lr                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
  CompatibleReceiverCheck(masm, x2, x3, x4, x5, x6, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ Ldr(x4, FieldMemOperand(x3, FunctionTemplateInfo::kCallCodeOffset));
  __ Ldr(x4, FieldMemOperand(x4, CallHandlerInfo::kFastHandlerOffset));
  __ Add(x4, x4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(x4);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ Bind(&receiver_check_failed);
  // Drop the arguments (including the receiver)
  __ add(x0, x0, Operand(1));
  __ Drop(x0);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}
1373
1374
// On-stack replacement: asks the runtime for optimized code for the function
// in the current frame and, if available, "returns" into that code at the
// OSR entry point recorded in its deoptimization data.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(x0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CompareAndBranch(x0, Smi::FromInt(0), ne, &skip);
  __ Ret();

  __ Bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ Ldrsw(w1, UntagSmiFieldMemOperand(x1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex)));

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ Add(x0, x0, x1);
  __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}
1409
1410
// Stack-check variant of OSR entry: only attempts on-stack replacement when
// the stack limit has been hit (used as an indicator that recompilation may
// have been requested); otherwise returns immediately.
void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
  __ B(hs, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ Bind(&ok);
  __ Ret();
}
1426
1427
// static
// Implements the Date.prototype getters: loads |field_index| from a JSDate
// receiver, using the cached fields when the isolate's date-cache stamp
// matches, and falling back to the C date-field function otherwise.
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- lr      : return address
  //  -- jssp[0] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_DatePrototype_GetField");

  // 1. Pop receiver into x0 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(x0);
    __ JumpIfSmi(x0, &receiver_not_date);
    __ JumpIfNotObjectType(x0, x1, x2, JS_DATE_TYPE, &receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    // The raw date value is always stored in the object; no cache check.
    __ Ldr(x0, FieldMemOperand(x0, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      // Cached field: valid only while the object's stamp matches the
      // isolate-wide date cache stamp.
      Label stamp_mismatch;
      __ Mov(x1, ExternalReference::date_cache_stamp(masm->isolate()));
      __ Ldr(x1, MemOperand(x1));
      __ Ldr(x2, FieldMemOperand(x0, JSDate::kCacheStampOffset));
      __ Cmp(x1, x2);
      __ B(ne, &stamp_mismatch);
      __ Ldr(x0, FieldMemOperand(
                     x0, JSDate::kValueOffset + field_index * kPointerSize));
      __ Ret();
      __ Bind(&stamp_mismatch);
    }
    // Slow path: call the C function with (date, field_index).
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Mov(x1, Smi::FromInt(field_index));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ Bind(&receiver_not_date);
  __ TailCallRuntime(Runtime::kThrowNotDateError);
}
1472
1473
// static
// Implements Function.prototype.apply: normalizes the stack into
// (receiver=thisArg, spread argArray) form and tail-calls either the Apply
// builtin, the Call builtin (for null/undefined argArray), or throws if the
// receiver is not callable.
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0       : argc
  //  -- jssp[0]  : argArray (if argc == 2)
  //  -- jssp[8]  : thisArg  (if argc >= 1)
  //  -- jssp[16] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_FunctionPrototypeApply");

  Register argc = x0;
  Register arg_array = x0;
  Register receiver = x1;
  Register this_arg = x2;
  Register undefined_value = x3;
  Register null_value = x4;

  __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);

  // 1. Load receiver into x1, argArray into x0 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    // Claim (2 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
    __ Claim(2);
    __ Drop(argc);

    // ----------- S t a t e -------------
    //  -- x0       : argc
    //  -- jssp[0]  : argArray (dummy value if argc <= 1)
    //  -- jssp[8]  : thisArg  (dummy value if argc == 0)
    //  -- jssp[16] : receiver
    // -----------------------------------
    __ Cmp(argc, 1);
    __ Pop(arg_array, this_arg);  // Overwrites argc.
    __ CmovX(this_arg, undefined_value, lo);   // undefined if argc == 0.
    __ CmovX(arg_array, undefined_value, ls);  // undefined if argc <= 1.

    // Replace the receiver slot with thisArg; receiver moves to x1.
    __ Peek(receiver, 0);
    __ Poke(this_arg, 0);
  }

  // ----------- S t a t e -------------
  //  -- x0      : argArray
  //  -- x1      : receiver
  //  -- x3      : undefined root value
  //  -- jssp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(receiver, &receiver_not_callable);
  __ Ldr(x10, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Ldrb(w10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable,
                             &receiver_not_callable);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ Cmp(arg_array, null_value);
  __ Ccmp(arg_array, undefined_value, ZFlag, ne);
  __ B(eq, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target in x3).
  DCHECK(undefined_value.Is(x3));
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ Bind(&no_arguments);
  {
    __ Mov(x0, 0);
    DCHECK(receiver.Is(x1));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ Bind(&receiver_not_callable);
  {
    __ Poke(receiver, 0);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1560
1561
1562// static
1563void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001564 Register argc = x0;
1565 Register function = x1;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001566 Register scratch1 = x10;
1567 Register scratch2 = x11;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001568
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001569 ASM_LOCATION("Builtins::Generate_FunctionPrototypeCall");
1570
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001571 // 1. Make sure we have at least one argument.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001572 {
1573 Label done;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001574 __ Cbnz(argc, &done);
1575 __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
1576 __ Push(scratch1);
1577 __ Mov(argc, 1);
1578 __ Bind(&done);
1579 }
1580
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001581 // 2. Get the callable to call (passed as receiver) from the stack.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001582 __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001583
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001584 // 3. Shift arguments and return address one slot down on the stack
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001585 // (overwriting the original receiver). Adjust argument count to make
1586 // the original first argument the new receiver.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001587 {
1588 Label loop;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001589 // Calculate the copy start address (destination). Copy end address is jssp.
1590 __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2));
1591 __ Sub(scratch1, scratch2, kPointerSize);
1592
1593 __ Bind(&loop);
1594 __ Ldr(x12, MemOperand(scratch1, -kPointerSize, PostIndex));
1595 __ Str(x12, MemOperand(scratch2, -kPointerSize, PostIndex));
1596 __ Cmp(scratch1, jssp);
1597 __ B(ge, &loop);
1598 // Adjust the actual number of arguments and remove the top element
1599 // (which is a copy of the last argument).
1600 __ Sub(argc, argc, 1);
1601 __ Drop(1);
1602 }
1603
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001604 // 4. Call the callable.
1605 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001606}
1607
1608
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001609void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1610 // ----------- S t a t e -------------
1611 // -- x0 : argc
1612 // -- jssp[0] : argumentsList (if argc == 3)
1613 // -- jssp[8] : thisArgument (if argc >= 2)
1614 // -- jssp[16] : target (if argc >= 1)
1615 // -- jssp[24] : receiver
1616 // -----------------------------------
1617 ASM_LOCATION("Builtins::Generate_ReflectApply");
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001618
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001619 Register argc = x0;
1620 Register arguments_list = x0;
1621 Register target = x1;
1622 Register this_argument = x2;
1623 Register undefined_value = x3;
1624
1625 __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
1626
1627 // 1. Load target into x1 (if present), argumentsList into x0 (if present),
1628 // remove all arguments from the stack (including the receiver), and push
1629 // thisArgument (if present) instead.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001630 {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001631 // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
1632 // consistent state for a simple pop operation.
1633 __ Claim(3);
1634 __ Drop(argc);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001635
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001636 // ----------- S t a t e -------------
1637 // -- x0 : argc
1638 // -- jssp[0] : argumentsList (dummy value if argc <= 2)
1639 // -- jssp[8] : thisArgument (dummy value if argc <= 1)
1640 // -- jssp[16] : target (dummy value if argc == 0)
1641 // -- jssp[24] : receiver
1642 // -----------------------------------
1643 __ Adds(x10, argc, 0); // Preserve argc, and set the Z flag if it is zero.
1644 __ Pop(arguments_list, this_argument, target); // Overwrites argc.
1645 __ CmovX(target, undefined_value, eq); // undefined if argc == 0.
1646 __ Cmp(x10, 2);
1647 __ CmovX(this_argument, undefined_value, lo); // undefined if argc <= 1.
1648 __ CmovX(arguments_list, undefined_value, ls); // undefined if argc <= 2.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001649
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001650 __ Poke(this_argument, 0); // Overwrite receiver.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001651 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001652
1653 // ----------- S t a t e -------------
1654 // -- x0 : argumentsList
1655 // -- x1 : target
1656 // -- jssp[0] : thisArgument
1657 // -----------------------------------
1658
1659 // 2. Make sure the target is actually callable.
1660 Label target_not_callable;
1661 __ JumpIfSmi(target, &target_not_callable);
1662 __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
1663 __ Ldr(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
1664 __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable, &target_not_callable);
1665
1666 // 3a. Apply the target to the given argumentsList (passing undefined for
1667 // new.target in x3).
1668 DCHECK(undefined_value.Is(x3));
1669 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1670
1671 // 3b. The target is not callable, throw an appropriate TypeError.
1672 __ Bind(&target_not_callable);
1673 {
1674 __ Poke(target, 0);
1675 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1676 }
1677}
1678
1679
1680void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1681 // ----------- S t a t e -------------
1682 // -- x0 : argc
1683 // -- jssp[0] : new.target (optional)
1684 // -- jssp[8] : argumentsList
1685 // -- jssp[16] : target
1686 // -- jssp[24] : receiver
1687 // -----------------------------------
1688 ASM_LOCATION("Builtins::Generate_ReflectConstruct");
1689
1690 Register argc = x0;
1691 Register arguments_list = x0;
1692 Register target = x1;
1693 Register new_target = x3;
1694 Register undefined_value = x4;
1695
1696 __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
1697
1698 // 1. Load target into x1 (if present), argumentsList into x0 (if present),
1699 // new.target into x3 (if present, otherwise use target), remove all
1700 // arguments from the stack (including the receiver), and push thisArgument
1701 // (if present) instead.
1702 {
1703 // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
1704 // consistent state for a simple pop operation.
1705 __ Claim(3);
1706 __ Drop(argc);
1707
1708 // ----------- S t a t e -------------
1709 // -- x0 : argc
1710 // -- jssp[0] : new.target (dummy value if argc <= 2)
1711 // -- jssp[8] : argumentsList (dummy value if argc <= 1)
1712 // -- jssp[16] : target (dummy value if argc == 0)
1713 // -- jssp[24] : receiver
1714 // -----------------------------------
1715 __ Adds(x10, argc, 0); // Preserve argc, and set the Z flag if it is zero.
1716 __ Pop(new_target, arguments_list, target); // Overwrites argc.
1717 __ CmovX(target, undefined_value, eq); // undefined if argc == 0.
1718 __ Cmp(x10, 2);
1719 __ CmovX(arguments_list, undefined_value, lo); // undefined if argc <= 1.
1720 __ CmovX(new_target, target, ls); // target if argc <= 2.
1721
1722 __ Poke(undefined_value, 0); // Overwrite receiver.
1723 }
1724
1725 // ----------- S t a t e -------------
1726 // -- x0 : argumentsList
1727 // -- x1 : target
1728 // -- x3 : new.target
1729 // -- jssp[0] : receiver (undefined)
1730 // -----------------------------------
1731
1732 // 2. Make sure the target is actually a constructor.
1733 Label target_not_constructor;
1734 __ JumpIfSmi(target, &target_not_constructor);
1735 __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
1736 __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
1737 __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor,
1738 &target_not_constructor);
1739
1740 // 3. Make sure the new.target is actually a constructor.
1741 Label new_target_not_constructor;
1742 __ JumpIfSmi(new_target, &new_target_not_constructor);
1743 __ Ldr(x10, FieldMemOperand(new_target, HeapObject::kMapOffset));
1744 __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
1745 __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor,
1746 &new_target_not_constructor);
1747
1748 // 4a. Construct the target with the given new.target and argumentsList.
1749 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1750
1751 // 4b. The target is not a constructor, throw an appropriate TypeError.
1752 __ Bind(&target_not_constructor);
1753 {
1754 __ Poke(target, 0);
1755 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
1756 }
1757
1758 // 4c. The new.target is not a constructor, throw an appropriate TypeError.
1759 __ Bind(&new_target_not_constructor);
1760 {
1761 __ Poke(new_target, 0);
1762 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
1763 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001764}
1765
1766
1767static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
1768 Label* stack_overflow) {
1769 // ----------- S t a t e -------------
1770 // -- x0 : actual number of arguments
1771 // -- x1 : function (passed through to callee)
1772 // -- x2 : expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001773 // -- x3 : new target (passed through to callee)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001774 // -----------------------------------
1775 // Check the stack for overflow.
1776 // We are not trying to catch interruptions (e.g. debug break and
1777 // preemption) here, so the "real stack limit" is checked.
1778 Label enough_stack_space;
1779 __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
1780 // Make x10 the space we have left. The stack might already be overflowed
1781 // here which will cause x10 to become negative.
1782 __ Sub(x10, jssp, x10);
1783 // Check if the arguments will overflow the stack.
1784 __ Cmp(x10, Operand(x2, LSL, kPointerSizeLog2));
1785 __ B(le, stack_overflow);
1786}
1787
1788
// Builds an arguments-adaptor frame: saves lr/fp, pushes the frame-type
// marker, the function (x1) and the smi-tagged argument count (from x0),
// then points fp at the standard fixed-frame offset so that
// LeaveArgumentsAdaptorFrame can locate the saved argument count again.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(x10, x0);  // x0 holds the actual argument count.
  __ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ Push(lr, fp);
  __ Push(x11, x1, x10);
  // fp points into the fixed part of the frame, one slot past the marker.
  __ Add(fp, jssp,
         StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
}
1797
1798
// Tears down the frame built by EnterArgumentsAdaptorFrame and removes the
// actual arguments (plus the receiver) from the stack.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then drop the parameters and the receiver.
  __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                               kPointerSize)));
  __ Mov(jssp, fp);
  __ Pop(fp, lr);
  // Drop the smi-tagged argument count worth of slots...
  __ DropBySMI(x10, kXRegSize);
  __ Drop(1);  // ...plus one extra slot for the receiver.
}
1812
1813
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001814// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0      : argumentsList
  //  -- x1      : target
  //  -- x3      : new.target (checked to be constructor or undefined)
  //  -- jssp[0] : thisArgument
  // -----------------------------------
  // Shared tail of Function.prototype.apply / Reflect.apply /
  // Reflect.construct: flattens argumentsList onto the stack and then
  // dispatches to either Call or Construct.

  Register arguments_list = x0;
  Register target = x1;
  Register new_target = x3;

  // After the create step below, x0 holds the FixedArray of arguments and
  // x2 its (untagged) length.
  Register args = x0;
  Register len = x2;

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(arguments_list, &create_runtime);

    // Load native context.
    Register native_context = x4;
    __ Ldr(native_context, NativeContextMemOperand());

    // Load the map of argumentsList.
    Register arguments_list_map = x2;
    __ Ldr(arguments_list_map,
           FieldMemOperand(arguments_list, HeapObject::kMapOffset));

    // Check if argumentsList is an (unmodified) arguments object, i.e. its
    // map is still one of the two canonical arguments maps.
    __ Ldr(x10, ContextMemOperand(native_context,
                                  Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ Ldr(x11, ContextMemOperand(native_context,
                                  Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ Cmp(arguments_list_map, x10);
    __ Ccmp(arguments_list_map, x11, ZFlag, ne);
    __ B(eq, &create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(arguments_list_map, native_context, JS_ARRAY_TYPE);
    __ B(eq, &create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ Bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(target, new_target, arguments_list);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      // The runtime leaves the FixedArray in x0 (arguments_list).
      __ Pop(new_target, target);
      __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list,
                                            FixedArray::kLengthOffset));
    }
    __ B(&done_create);

    // Try to create the list from an arguments object.
    __ Bind(&create_arguments);
    __ Ldrsw(len, UntagSmiFieldMemOperand(
                      arguments_list,
                      JSObject::kHeaderSize +
                          Heap::kArgumentsLengthIndex * kPointerSize));
    __ Ldr(x10, FieldMemOperand(arguments_list, JSObject::kElementsOffset));
    __ Ldrsw(x11, UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
    // If the stored length no longer matches the elements backing store the
    // arguments object was modified; fall back to the runtime.
    __ CompareAndBranch(len, x11, ne, &create_runtime);
    __ Mov(args, x10);
    __ B(&done_create);

    // Try to create the list from a JSArray object.
    __ Bind(&create_array);
    __ Ldr(x10, FieldMemOperand(arguments_list_map, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(x10);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    // Branch for anything that's not FAST_{SMI_}ELEMENTS.
    __ TestAndBranchIfAnySet(x10, ~FAST_ELEMENTS, &create_runtime);
    __ Ldrsw(len,
             UntagSmiFieldMemOperand(arguments_list, JSArray::kLengthOffset));
    __ Ldr(args, FieldMemOperand(arguments_list, JSArray::kElementsOffset));

    __ Bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
    // Make x10 the space we have left. The stack might already be overflowed
    // here which will cause x10 to become negative.
    __ Sub(x10, masm->StackPointer(), x10);
    // Check if the arguments will overflow the stack.
    __ Cmp(x10, Operand(len, LSL, kPointerSizeLog2));
    __ B(gt, &done);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ Bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- x0      : args (a FixedArray built from argumentsList)
  //  -- x1      : target
  //  -- x2      : len (number of elements to push from args)
  //  -- x3      : new.target (checked to be constructor or undefined)
  //  -- jssp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label done, loop;
    Register src = x4;

    __ Add(src, args, FixedArray::kHeaderSize - kHeapObjectTag);
    __ Mov(x0, len);  // The 'len' argument for Call() or Construct().
    __ Cbz(len, &done);
    __ Claim(len);
    __ Bind(&loop);
    // Copy forward from the FixedArray while the stack slot index counts
    // down, so args[0] ends up deepest on the stack.
    __ Sub(len, len, 1);
    __ Ldr(x10, MemOperand(src, kPointerSize, PostIndex));
    __ Poke(x10, Operand(len, LSL, kPointerSizeLog2));
    __ Cbnz(len, &loop);
    __ Bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- x0              : argument count (len)
  //  -- x1              : target
  //  -- x3              : new.target (checked to be constructor or undefined)
  //  -- jssp[0]         : args[len-1]
  //  -- jssp[8]         : args[len-2]
  //  ...                : ...
  //  -- jssp[8*(len-2)] : args[1]
  //  -- jssp[8*(len-1)] : args[0]
  // -----------------------------------

  // Dispatch to Call or Construct depending on whether new.target is
  // undefined.
  {
    __ CompareRoot(new_target, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
1955
1956
1957// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  ASM_LOCATION("Builtins::Generate_CallFunction");
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(x1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that function is not a "classConstructor".
  Label class_constructor;
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
  __ TestAndBranchIfAnySet(
      w3, (1 << SharedFunctionInfo::kIsDefaultConstructor) |
              (1 << SharedFunctionInfo::kIsSubclassConstructor) |
              (1 << SharedFunctionInfo::kIsBaseConstructor),
      &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ TestAndBranchIfAnySet(w3,
                           (1 << SharedFunctionInfo::kNative) |
                               (1 << SharedFunctionInfo::kStrictModeFunction),
                           &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- x0 : the number of arguments (not including the receiver)
    //  -- x1 : the function to call (checked to be a JSFunction)
    //  -- x2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(x3);
    } else {
      Label convert_to_object, convert_receiver;
      // Load the receiver from the stack into x3.
      __ Peek(x3, Operand(x0, LSL, kXRegSizeLog2));
      __ JumpIfSmi(x3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(x3, x4, x4, FIRST_JS_RECEIVER_TYPE);
      __ B(hs, &done_convert);  // JSReceivers need no conversion.
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(x3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(x3, Heap::kNullValueRootIndex, &convert_to_object);
        __ Bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(x3);
        }
        __ B(&convert_receiver);
      }
      __ Bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        // Preserve the (smi-tagged) argument count and the function across
        // the stub call.
        __ SmiTag(x0);
        __ Push(x0, x1);
        __ Mov(x0, x3);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ Mov(x3, x0);
        __ Pop(x1, x0);
        __ SmiUntag(x0);
      }
      // Reload the shared function info; x2 was clobbered above.
      __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
      __ Bind(&convert_receiver);
    }
    // Store the converted receiver back into its stack slot.
    __ Poke(x3, Operand(x0, LSL, kXRegSizeLog2));
  }
  __ Bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSFunction)
  //  -- x2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  __ Ldrsw(
      x2, FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(x0);
  ParameterCount expected(x2);
  __ InvokeFunctionCode(x1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(x1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
2062
2063
2064namespace {
2065
// Inserts the [[BoundArguments]] of the JSBoundFunction in x1 between the
// receiver and the existing stack arguments, incrementing x0 accordingly.
// No-op if the bound-arguments array is empty.
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : target (checked to be a JSBoundFunction)
  //  -- x3 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into x2 and length of that into x4.
  Label no_bound_arguments;
  __ Ldr(x2, FieldMemOperand(x1, JSBoundFunction::kBoundArgumentsOffset));
  __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
  __ Cmp(x4, 0);
  __ B(eq, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- x0 : the number of arguments (not including the receiver)
    //  -- x1 : target (checked to be a JSBoundFunction)
    //  -- x2 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- x3 : new.target (only in case of [[Construct]])
    //  -- x4 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ Claim(x4);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(jssp, Heap::kRealStackLimitRootIndex);
      __ B(gt, &done);  // Signed comparison.
      // Restore the stack pointer.
      __ Drop(x4);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ Bind(&done);
    }

    // Relocate arguments down the stack.
    // Moves the x0 arguments plus the receiver down into the space just
    // claimed: x5 indexes the destination slot, x4 the (old) source slot.
    {
      Label loop, done_loop;
      __ Mov(x5, 0);
      __ Bind(&loop);
      __ Cmp(x5, x0);
      __ B(gt, &done_loop);
      __ Peek(x10, Operand(x4, LSL, kPointerSizeLog2));
      __ Poke(x10, Operand(x5, LSL, kPointerSizeLog2));
      __ Add(x4, x4, 1);
      __ Add(x5, x5, 1);
      __ B(&loop);
      __ Bind(&done_loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
      __ Add(x2, x2, FixedArray::kHeaderSize - kHeapObjectTag);
      __ Bind(&loop);
      __ Sub(x4, x4, 1);
      __ Ldr(x10, MemOperand(x2, x4, LSL, kPointerSizeLog2));
      __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));
      // Each bound argument copied also bumps the outgoing argument count.
      __ Add(x0, x0, 1);
      __ Cmp(x4, 0);
      __ B(gt, &loop);
    }
  }
  __ Bind(&no_bound_arguments);
}
2138
2139} // namespace
2140
2141
2142// static
void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  // Implements [[Call]] for bound functions: install [[BoundThis]] as the
  // receiver, splice in the bound arguments, then tail-call the bound target
  // through the generic Call builtin.
  __ AssertBoundFunction(x1);

  // Patch the receiver to [[BoundThis]].
  __ Ldr(x10, FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset));
  __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Mov(x10,
         ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()));
  __ Ldr(x11, MemOperand(x10));
  // Skip past the Code object header to the first instruction.
  __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x12);
}
2165
2166
2167// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the target to call (can be any Object).
  // -----------------------------------
  // Generic [[Call]] dispatcher: routes JSFunctions and JSBoundFunctions to
  // their dedicated builtins, proxies to the runtime, other callables to the
  // call-as-function delegate, and throws for non-callables.

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(x1, &non_callable);  // Smis are never callable.
  __ Bind(&non_smi);
  __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET, eq);
  __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(),
          RelocInfo::CODE_TARGET, eq);
  __ Cmp(x5, JS_PROXY_TYPE);
  __ B(ne, &non_function);

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(x1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ Add(x0, x0, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ Bind(&non_function);
  // Check if target has a [[Call]] internal method.
  // (x4 still holds the target's map from CompareObjectType above.)
  __ Ldrb(x4, FieldMemOperand(x4, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x4, 1 << Map::kIsCallable, &non_callable);
  // Overwrite the original receiver with the (original) target.
  __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, x1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
2217
2218
2219// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (checked to be a JSFunction)
  //  -- x3 : the new target (checked to be a constructor)
  // -----------------------------------
  // Implements [[Construct]] for JSFunctions by tail-calling the
  // function-specific construct stub found on the SharedFunctionInfo.
  __ AssertFunction(x1);

  // Calling convention for function specific ConstructStubs require
  // x2 to contain either an AllocationSite or undefined.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
  // Skip past the Code object header to the first instruction.
  __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x4);
}
2239
2240
2241// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSBoundFunction)
  //  -- x3 : the new target (checked to be a constructor)
  // -----------------------------------
  // Implements [[Construct]] for bound functions: splice in the bound
  // arguments, fix up new.target, then tail-call the generic Construct
  // builtin on the bound target.
  __ AssertBoundFunction(x1);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ Cmp(x1, x3);
    __ B(ne, &done);
    __ Ldr(x3,
           FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ Bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Mov(x10, ExternalReference(Builtins::kConstruct, masm->isolate()));
  __ Ldr(x11, MemOperand(x10));
  // Skip past the Code object header to the first instruction.
  __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x12);
}
2270
2271
2272// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (checked to be a JSProxy)
  //  -- x3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------
  // Proxy [[Construct]] is fully handled in the runtime.

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(x1);
  __ Push(x3);
  // Include the pushed new_target, constructor and the receiver.
  __ Add(x0, x0, 3);
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
2290
2291
2292// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (can be any Object)
  //  -- x3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------
  // Generic [[Construct]] dispatcher, the construction counterpart of
  // Generate_Call above.

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(x1, &non_constructor);

  // Dispatch based on instance type.
  __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  // (x4 still holds the target's map from CompareObjectType above.)
  __ Ldrb(x2, FieldMemOperand(x4, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x2, 1 << Map::kIsConstructor, &non_constructor);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ Cmp(x5, JS_PROXY_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, x1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
2341
2342
2343// static
void Builtins::Generate_InterpreterPushArgsAndCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- x1 : the target to call (can be any Object).
  // -----------------------------------
  // Copies the interpreter's argument registers onto the JS stack and
  // tail-calls the generic Call builtin.

  // Find the address of the last argument.
  __ add(x3, x0, Operand(1));  // Add one for receiver.
  __ lsl(x3, x3, kPointerSizeLog2);  // x3 is now the total size in bytes.
  __ sub(x4, x2, x3);  // x4 is the address just past the last argument.

  // Push the arguments.
  Label loop_header, loop_check;
  __ Mov(x5, jssp);
  __ Claim(x3, 1);  // Claim x3 byte-sized units of stack space.
  __ B(&loop_check);
  __ Bind(&loop_header);
  // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
  __ Ldr(x3, MemOperand(x2, -kPointerSize, PostIndex));
  __ Str(x3, MemOperand(x5, -kPointerSize, PreIndex));
  __ Bind(&loop_check);
  __ Cmp(x2, x4);
  __ B(gt, &loop_header);

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
2374
2375
// static
// Interpreter trampoline for [[Construct]]: pushes a zero-filled receiver
// slot plus |x0| arguments (copied from the area at x2) onto the JS stack,
// then tail-calls the generic Construct builtin with x0, x1 and x3 intact.
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : argument count (not including receiver)
  //  -- x3 : new target
  //  -- x1 : constructor to call
  //  -- x2 : address of the first argument
  // -----------------------------------

  // Find the address of the last argument.
  __ add(x5, x0, Operand(1));  // Add one for receiver (to be constructed).
  __ lsl(x5, x5, kPointerSizeLog2);  // x5 = total size to claim, in bytes.

  // Set stack pointer and where to stop.
  __ Mov(x6, jssp);            // x6 walks down from the old stack pointer.
  __ Claim(x5, 1);             // Reserve x5 bytes (unit size 1).
  __ sub(x4, x6, x5);          // x4 = lowest address to be written; sentinel.

  // Push a slot for the receiver.
  // xzr stores zero into the reserved receiver slot; the receiver object is
  // created later by the Construct builtin.
  __ Str(xzr, MemOperand(x6, -kPointerSize, PreIndex));

  Label loop_header, loop_check;
  // Push the arguments.
  __ B(&loop_check);
  __ Bind(&loop_header);
  // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
  // Source is read downwards (post-index); destination filled downwards
  // (pre-index), preserving argument order.
  __ Ldr(x5, MemOperand(x2, -kPointerSize, PostIndex));
  __ Str(x5, MemOperand(x6, -kPointerSize, PreIndex));
  __ Bind(&loop_check);
  __ Cmp(x6, x4);              // Loop until the destination reaches the sentinel.
  __ B(gt, &loop_header);

  // Call the constructor with x0, x1, and x3 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
2411
2412
// Adapts a call whose actual argument count (x0) differs from the callee's
// expected count (x2): builds an arguments-adaptor frame, copies the actual
// arguments into it (truncating extras or padding missing ones with
// undefined), sets x0 to the expected count, and calls the function's code
// entry. If the callee uses the "don't adapt" sentinel, jumps straight to its
// code entry without building a frame.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
  // ----------- S t a t e -------------
  //  -- x0 : actual number of arguments
  //  -- x1 : function (passed through to callee)
  //  -- x2 : expected number of arguments
  //  -- x3 : new target (passed through to callee)
  // -----------------------------------

  Register argc_actual = x0;  // Excluding the receiver.
  Register argc_expected = x2;  // Excluding the receiver.
  Register function = x1;
  // NOTE: code_entry aliases x10, which is also used as copy_start/copy_from
  // in the copy loops below; the code entry is only loaded at &invoke and
  // &dont_adapt_arguments, after the copies are done.
  Register code_entry = x10;

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Cmp(argc_actual, argc_expected);
  __ B(lt, &too_few);
  // The sentinel expected count means the callee handles any argc itself.
  __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
  __ B(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    Register copy_start = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    // scratch2 = expected argument area size in bytes.
    __ Lsl(scratch2, argc_expected, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_start, fp, 3 * kPointerSize);
    __ Add(copy_start, copy_start, Operand(argc_actual, LSL, kPointerSizeLog2));
    // Copy only the expected arguments plus the receiver; surplus actual
    // arguments below copy_end are dropped.
    __ Sub(copy_end, copy_start, scratch2);
    __ Sub(copy_end, copy_end, kPointerSize);
    __ Mov(copy_to, jssp);

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, scratch2, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    // Pairwise copy (Ldp/Stp), walking both cursors downwards; the extra
    // claimed slot absorbs the possible half-pair overshoot.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_start, - 2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_start, copy_end);
    __ B(hi, &copy_2_by_2);

    // Correct the space allocated for the extra slot.
    __ Drop(1);

    __ B(&invoke);
  }

  {  // Too few parameters: Actual < expected
    __ Bind(&too_few);

    Register copy_from = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    // If the function is strong we need to throw an error.
    Label no_strong_error;
    __ Ldr(scratch1,
           FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(scratch2.W(),
           FieldMemOperand(scratch1, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestAndBranchIfAllClear(scratch2.W(),
                               (1 << SharedFunctionInfo::kStrongModeFunction),
                               &no_strong_error);

    // What we really care about is the required number of arguments.
    DCHECK_EQ(kPointerSize, kInt64Size);
    __ Ldr(scratch2.W(),
           FieldMemOperand(scratch1, SharedFunctionInfo::kLengthOffset));
    // The length field is a Smi; LSR 1 untags it (32-bit Smi encoding here —
    // see the DCHECK above).
    __ Cmp(argc_actual, Operand(scratch2, LSR, 1));
    __ B(ge, &no_strong_error);

    {
      // Strong-mode callee with too few arguments: throw. Does not return.
      FrameScope frame(masm, StackFrame::MANUAL);
      EnterArgumentsAdaptorFrame(masm);
      __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments);
    }

    __ Bind(&no_strong_error);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // scratch2 = expected argument area size in bytes;
    // argc_actual is converted in place to the actual size in bytes.
    __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
    __ Lsl(argc_actual, argc_actual, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_from, fp, 3 * kPointerSize);
    __ Add(copy_from, copy_from, argc_actual);
    __ Mov(copy_to, jssp);
    __ Sub(copy_end, copy_to, 1 * kPointerSize);  // Adjust for the receiver.
    __ Sub(copy_end, copy_end, argc_actual);

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, scratch2, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_from, - 2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &copy_2_by_2);

    __ Mov(copy_to, copy_end);

    // Fill the remaining expected arguments with undefined.
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Add(copy_end, jssp, kPointerSize);  // Stop one slot above jssp.

    Label fill;
    __ Bind(&fill);
    __ Stp(scratch1, scratch1,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &fill);

    // Correct the space allocated for the extra slot.
    __ Drop(1);
  }

  // Arguments have been adapted. Now call the entry point.
  __ Bind(&invoke);
  __ Mov(argc_actual, argc_expected);
  // x0 : expected number of arguments
  // x1 : function (passed through to callee)
  // x3 : new target (passed through to callee)
  __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  __ Call(code_entry);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // Call the entry point without adapting the arguments.
  __ Bind(&dont_adapt_arguments);
  __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  __ Jump(code_entry);

  __ Bind(&stack_overflow);
  {
    // Stack check failed above: throw inside the already-entered adaptor
    // frame. Does not return.
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ Unreachable();
  }
}
2581
2582
2583#undef __
2584
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002585} // namespace internal
2586} // namespace v8
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002587
#endif  // V8_TARGET_ARCH_ARM64