// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

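// As in V8's other builtins files, the `__` macro below routes each
// pseudo-instruction through ACCESS_MASM, so `__ add(...)` emits an ARM
// instruction into the builtin currently being generated.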
#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : target
  //  -- r3                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(r1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Insert extra arguments.
  int num_extra_args = 0;
  switch (extra_args) {
    case BuiltinExtraArguments::kTarget:
      __ Push(r1);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kNewTarget:
      __ Push(r3);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kTargetAndNewTarget:
      __ Push(r1, r3);
      num_extra_args += 2;
      break;
    case BuiltinExtraArguments::kNone:
      break;
  }

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(r0, r0, Operand(num_extra_args + 1));

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ mov(r3, r1);
  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments
  //  -- lr                 : return address
  //  -- sp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- sp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Condition const cc_done = (kind == MathMaxMinKind::kMin) ? mi : gt;
  Condition const cc_swap = (kind == MathMaxMinKind::kMin) ? gt : mi;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r1 and the double value in d1.
  __ LoadRoot(r1, root_index);
  __ vldr(d1, FieldMemOperand(r1, HeapNumber::kValueOffset));

  // Remember how many slots to drop (including the receiver).
  __ add(r4, r0, Operand(1));

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lt, &done_loop);

    // Load the next parameter tagged value into r2.
    __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));

    // Load the double value of the parameter into d2, converting the
    // parameter to a number first with the ToNumberStub if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r2, &convert_smi);
    __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
    __ JumpIfRoot(r3, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumberStub to convert it.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r0);
      __ SmiTag(r4);
      __ Push(r0, r1, r4);
      __ mov(r0, r2);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(r2, r0);
      __ Pop(r0, r1, r4);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
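        // Convert optimistically: if r1 turns out to be a heap number, the
        // vldr below simply overwrites d1 with the exact value.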
        __ SmiToDouble(d1, r1);
        __ JumpIfSmi(r1, &done_restore);
        __ vldr(d1, FieldMemOperand(r1, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
      __ SmiUntag(r4);
      __ SmiUntag(r0);
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ vldr(d2, FieldMemOperand(r2, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r2);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left hand
    // side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ VFPCompareAndSetFlags(d1, d2);
    __ b(cc_done, &loop);
    __ b(cc_swap, &compare_swap);
    __ b(vs, &compare_nan);

    // Left and right hand side are equal, check for -0 vs. +0.
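    // Only the sign bit can differ when the operands compare equal: for kMin
    // a -0 parameter replaces a +0 accumulator, for kMax a -0 accumulator is
    // replaced by the parameter, so min(+0, -0) is -0 and max(-0, +0) is +0.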
    __ VmovHigh(ip, reg);
    __ cmp(ip, Operand(0x80000000));
    __ b(ne, &loop);

    // Result is on the right hand side.
    __ bind(&compare_swap);
    __ vmov(d1, d2);
    __ mov(r1, r2);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(r1, Heap::kNanValueRootIndex);
    __ vldr(d1, FieldMemOperand(r1, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  __ mov(r0, r1);
  __ Drop(r4);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r0 and get rid of the rest (including the
  //    receiver).
  Label no_arguments;
  {
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
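    // The PreIndex write-back leaves sp pointing at the first-argument slot,
    // so the Drop below removes both that slot and the receiver.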
    __ Drop(2);
  }

  // 2a. Convert the first argument to a number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(r0, Smi::FromInt(0));
  __ Ret(1);
}


// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- r3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // 2. Load the first argument into r2 and get rid of the rest (including the
  //    receiver).
  {
    Label no_arguments, done;
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ Move(r2, Smi::FromInt(0));
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r2 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r2, &done_convert);
    __ CompareObjectType(r2, r4, r4, HEAP_NUMBER_TYPE);
    __ b(eq, &done_convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r1, r3);
      __ Move(r0, r2);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Move(r2, r0);
      __ Pop(r1, r3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(r1, r3);
  __ b(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r0, r1, r2, r4, r5, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r2);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r2);
  }
  __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));
  __ Ret();
}


// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r0 and get rid of the rest (including the
  //    receiver).
  Label no_arguments;
  {
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
    __ Drop(2);
  }

  // 2a. At least one argument, return r0 if it's a string, otherwise
  //     dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r0, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
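    // CompareObjectType leaves the flags set from comparing the instance type
    // against FIRST_NONSTRING_TYPE (== SYMBOL_TYPE): lo means r0 is already a
    // string, eq means it is a symbol, hi means any other kind of object.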
    __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE);
    __ b(hi, &to_string);
    __ b(eq, &symbol_descriptive_string);
    __ Ret();
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r0, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r0 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in r0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(r0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}


// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- r3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // 2. Load the first argument into r2 and get rid of the rest (including the
  //    receiver).
  {
    Label no_arguments, done;
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r2, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r2 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r2, &convert);
    __ CompareObjectType(r2, r4, r4, FIRST_NONSTRING_TYPE);
    __ b(lo, &done_convert);
    __ bind(&convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(r1, r3);
      __ Move(r0, r2);
      __ CallStub(&stub);
      __ Move(r2, r0);
      __ Pop(r1, r3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(r1, r3);
  __ b(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r0, r1, r2, r4, r5, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r2);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r2);
  }
  __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));
  __ Ret();
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r0 : argument count (preserved for callee)
  //  -- r1 : target function (preserved for callee)
  //  -- r3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ SmiTag(r0);
    __ push(r0);
    // Push a copy of the target function and the new target.
    __ push(r1);
    __ push(r3);
    // Push function as parameter to the runtime call.
    __ Push(r1);

    __ CallRuntime(function_id, 1);
    __ mov(r2, r0);

    // Restore target function and new target.
    __ pop(r3);
    __ pop(r1);
    __ pop(r0);
    __ SmiUntag(r0, r0);
  }
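  // The runtime call returned a Code object in r0 (copied into r2 above);
  // skip the Code header and clear the heap-object tag to form the entry
  // address.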
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- r2     : allocation site or undefined
  //  -- r3     : new target
  //  -- cp     : context
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r2, r4);
    __ Push(cp);
    __ SmiTag(r0);
    __ Push(r2, r0);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(r1, r3);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(r4, r0);
      __ Pop(r1, r3);

      // ----------- S t a t e -------------
      //  -- r1: constructor function
      //  -- r3: new target
      //  -- r4: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ ldr(r0, MemOperand(sp));
    }

    __ SmiUntag(r0);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ push(r4);
      __ push(r4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r0: number of arguments
    // r1: constructor function
    // r2: address of last argument (caller sp)
    // r3: new target
    // r4: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ SmiTag(r4, r0);
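    // r4 holds the argument count as a smi (i.e. value << 1), so scaling it
    // by kPointerSizeLog2 - 1 in the loop below yields the byte offset
    // directly.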
    __ b(&entry);
    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2 - 1));
    __ push(ip);
    __ bind(&entry);
    __ sub(r4, r4, Operand(2), SetCC);
    __ b(ge, &loop);

    // Call the function.
    // r0: number of arguments
    // r1: constructor function
    // r3: new target
    ParameterCount actual(r0);
    __ InvokeFunction(r1, r3, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r0: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r0: result
      // sp[0]: receiver
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(r0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r0, r1, r3, FIRST_JS_RECEIVER_TYPE);
      __ b(ge, &exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ ldr(r0, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ ldr(r1, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  }
  __ Jump(lr);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the value to pass to the generator
  //  -- r1 : the JSGeneratorObject to resume
  //  -- r2 : the resume mode (tagged)
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(r1);

  // Store input value into generator object.
  __ str(r0, FieldMemOperand(r1, JSGeneratorObject::kInputOffset));
  __ RecordWriteField(r1, JSGeneratorObject::kInputOffset, r0, r3,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
  __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(masm->isolate());
  __ mov(ip, Operand(step_in_enabled));
  __ ldrb(ip, MemOperand(ip));
  __ cmp(ip, Operand(0));
  __ b(eq, &skip_flooding);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r1, r2, r4);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(r1, r2);
    __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
  }
  __ bind(&skip_flooding);

  // Push receiver.
  __ ldr(ip, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
  __ Push(ip);

  // ----------- S t a t e -------------
  //  -- r1    : the JSGeneratorObject to resume
  //  -- r2    : the resume mode (tagged)
  //  -- r4    : generator function
  //  -- cp    : generator context
  //  -- lr    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r3,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label done_loop, loop;
    __ bind(&loop);
    __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
    __ b(mi, &done_loop);
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ b(&loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(r3, r3, r3, BYTECODE_ARRAY_TYPE);
  __ b(ne, &old_generator);

  // New-style (ignition/turbofan) generator object
  {
    __ ldr(r0, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r0,
           FieldMemOperand(r0, SharedFunctionInfo::kFormalParameterCountOffset));
    __ SmiUntag(r0);
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(r3, r1);
    __ Move(r1, r4);
    __ ldr(r5, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
    __ Jump(r5);
  }

  // Old-style (full-codegen) generator object
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    DCHECK(!FLAG_enable_embedded_constant_pool);
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(lr, fp);
    __ Move(fp, sp);
    __ Push(cp, r4);

    // Restore the operand stack.
    __ ldr(r0, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
    __ ldr(r3, FieldMemOperand(r0, FixedArray::kLengthOffset));
    __ add(r0, r0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ add(r3, r0, Operand(r3, LSL, kPointerSizeLog2 - 1));
    {
      Label done_loop, loop;
      __ bind(&loop);
      __ cmp(r0, r3);
      __ b(eq, &done_loop);
      __ ldr(ip, MemOperand(r0, kPointerSize, PostIndex));
      __ Push(ip);
      __ b(&loop);
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(ip, Heap::kEmptyFixedArrayRootIndex);
    __ str(ip, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));

    // Resume the generator function at the continuation.
    __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset));
    __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
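    // The continuation is a smi-tagged offset into the code; the ASR #1 in
    // the add below untags it while computing the resume address.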
    __ add(r3, r3, Operand(r2, ASR, 1));
    __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
    __ Move(r0, r1);  // Continuation expects generator object in r0.
    __ Jump(r3);
  }
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ push(r1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}


enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers r2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
  // Make r2 the space we have left. The stack might already be overflowed
  // here which will cause r2 to become negative.
  __ sub(r2, sp, r2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ cmp(r2, Operand::PointerOffsetFromSmiKey(argc));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ cmp(r2, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ b(gt, &okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: new.target
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r6, r8 (if !FLAG_enable_embedded_constant_pool) and cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ ldr(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r1, r2);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r2.
    Generate_CheckStackOverflow(masm, r3, kArgcIsUntaggedInt);

    // Remember new.target.
    __ mov(r5, r0);

    // Copy arguments to the stack in a loop.
    // r1: function
    // r3: argc
    // r4: argv, i.e. points to first arg
    Label loop, entry;
    __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
    // r2 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
    __ ldr(r0, MemOperand(r0));  // dereference handle
    __ push(r0);  // push parameter
    __ bind(&entry);
    __ cmp(r4, r2);
    __ b(ne, &loop);

    // Set up new.target and argc.
    __ mov(r0, Operand(r3));
    __ mov(r3, Operand(r5));

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mov(r5, Operand(r4));
    __ mov(r6, Operand(r4));
    if (!FLAG_enable_embedded_constant_pool) {
      __ mov(r8, Operand(r4));
    }
    if (kR9Available == 1) {
      __ mov(r9, Operand(r4));
    }

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
    // Respect ABI stack constraint.
  }
  __ Jump(lr);

  // r0: result
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r1: the JS function object being called.
//   o r3: the new target
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(r1);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ ldr(r0, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  Register debug_info = kInterpreterBytecodeArrayRegister;
  DCHECK(!debug_info.is(r0));
  __ ldr(debug_info, FieldMemOperand(r0, SharedFunctionInfo::kDebugInfoOffset));
  __ cmp(debug_info, Operand(DebugInfo::uninitialized()));
  // Load original bytecode array or the debug copy.
  __ ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(r0, SharedFunctionInfo::kFunctionDataOffset), eq);
  __ ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex), ne);

  // Check function data field is actually a BytecodeArray object.
  Label bytecode_array_not_present;
  __ CompareRoot(kInterpreterBytecodeArrayRegister,
                 Heap::kUndefinedValueRootIndex);
  __ b(eq, &bytecode_array_not_present);
  if (FLAG_debug_code) {
    __ SmiTst(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r0, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Load the initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r0, kInterpreterBytecodeOffsetRegister);
  __ Push(r3, kInterpreterBytecodeArrayRegister, r0);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ ldr(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r9, sp, Operand(r4));
    __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
    __ cmp(r9, Operand(r2));
    __ b(hs, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
    __ b(&loop_check, al);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(r9);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ sub(r4, r4, Operand(kPointerSize), SetCC);
    __ b(&loop_header, ge);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r1, LSL,
                        kPointerSizeLog2));
  __ Call(ip);
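  // Remember the pc immediately after the Call above; the dispatch code in
  // InterpreterEnterBytecodeDispatch reconstructs lr from this offset.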
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in r0.

  // Get the arguments + receiver count.
  __ ldr(r2, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ ldr(r2, FieldMemOperand(r2, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  __ add(sp, sp, r2, LeaveCC);
  __ Jump(lr);

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ bind(&bytecode_array_not_present);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kCodeOffset));
  __ add(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ str(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(r1, r4, r5);
  __ Jump(r4);
}

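// Helper for the InterpreterPushArgs* builtins below: pushes the stack slots
// from `index` down to `limit` (exclusive), starting at the highest address.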
static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
                                         Register limit, Register scratch) {
  Label loop_header, loop_check;
  __ b(al, &loop_check);
  __ bind(&loop_header);
  __ ldr(scratch, MemOperand(index, -kPointerSize, PostIndex));
  __ push(scratch);
  __ bind(&loop_check);
  __ cmp(index, limit);
  __ b(gt, &loop_header);
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r1 : the target to call (can be any Object).
  // -----------------------------------

  // Find the address of the last argument.
  __ add(r3, r0, Operand(1));  // Add one for receiver.
  __ mov(r3, Operand(r3, LSL, kPointerSizeLog2));
  __ sub(r3, r2, r3);

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r2, r3, r4);

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : argument count (not including receiver)
  //  -- r3 : new target
  //  -- r1 : constructor to call
  //  -- r2 : address of the first argument
  // -----------------------------------

  // Find the address of the last argument.
  __ mov(r4, Operand(r0, LSL, kPointerSizeLog2));
  __ sub(r4, r2, r4);

  // Push a slot for the receiver to be constructed.
  __ mov(ip, Operand::Zero());
  __ push(ip);

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r2, r4, r5);

  // Call the constructor with r0, r1, and r3 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ Move(r2, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ add(lr, r2, Operand(interpreter_entry_return_pc_offset->value() +
                         Code::kHeaderSize - kHeapObjectTag));
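  // From here on, returning from a bytecode handler resumes inside the
  // interpreter entry trampoline, just after its dispatch call.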

  // Initialize the dispatch table register.
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ ldr(kInterpreterBytecodeArrayRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r1, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ ldr(kInterpreterBytecodeOffsetRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r1, LSL,
                        kPointerSizeLog2));
  __ mov(pc, ip);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : argument count (preserved for callee)
  //  -- r3 : new target (preserved for callee)
  //  -- r1 : target function (preserved for callee)
  // -----------------------------------
  // First look up code; maybe we don't need to compile!
  Label gotta_call_runtime, gotta_call_runtime_no_stack;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register argument_count = r0;
  Register closure = r1;
  Register new_target = r3;
  __ push(argument_count);
  __ push(new_target);
  __ push(closure);

  Register map = argument_count;
  Register index = r2;
  __ ldr(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(map,
         FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ ldr(index, FieldMemOperand(map, FixedArray::kLengthOffset));
  __ cmp(index, Operand(Smi::FromInt(2)));
  __ b(lt, &gotta_call_runtime);
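  // Note: the first slot of the optimized code map appears to be reserved
  // for context-independent code (see the kSharedCodeIndex lookup below), so
  // a length below two means there are no per-context entries to search.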
1218
1219 // Find literals.
1220 // r3 : native context
1221 // r2 : length / index
1222 // r0 : optimized code map
1223 // stack[0] : new target
1224 // stack[4] : closure
1225 Register native_context = r3;
1226 __ ldr(native_context, NativeContextMemOperand());
1227
1228 __ bind(&loop_top);
1229 Register temp = r1;
1230 Register array_pointer = r5;
1231
1232 // Does the native context match?
1233 __ add(array_pointer, map, Operand::PointerOffsetFromSmiKey(index));
1234 __ ldr(temp, FieldMemOperand(array_pointer,
1235 SharedFunctionInfo::kOffsetToPreviousContext));
1236 __ ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
1237 __ cmp(temp, native_context);
1238 __ b(ne, &loop_bottom);
1239 // OSR id set to none?
1240 __ ldr(temp, FieldMemOperand(array_pointer,
1241 SharedFunctionInfo::kOffsetToPreviousOsrAstId));
1242 const int bailout_id = BailoutId::None().ToInt();
1243 __ cmp(temp, Operand(Smi::FromInt(bailout_id)));
1244 __ b(ne, &loop_bottom);
1245 // Literals available?
1246 __ ldr(temp, FieldMemOperand(array_pointer,
1247 SharedFunctionInfo::kOffsetToPreviousLiterals));
1248 __ ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
1249 __ JumpIfSmi(temp, &gotta_call_runtime);
1250
1251 // Save the literals in the closure.
1252 __ ldr(r4, MemOperand(sp, 0));
1253 __ str(temp, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
1254 __ push(index);
1255 __ RecordWriteField(r4, JSFunction::kLiteralsOffset, temp, index,
1256 kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1257 OMIT_SMI_CHECK);
1258 __ pop(index);

  // Code available?
  Register entry = r4;
  __ ldr(entry,
         FieldMemOperand(array_pointer,
                         SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &maybe_call_runtime);

  // Found literals and code. Get them into the closure and return.
  __ pop(closure);
  // Store code entry in the closure.
  __ add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));

  Label install_optimized_code_and_tailcall;
  __ bind(&install_optimized_code_and_tailcall);
  __ str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, r5);

  // Link the closure into the optimized function list.
  // r4 : code entry
  // r3 : native context
  // r1 : closure
  __ ldr(r5,
         ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ str(r5, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r5, r0,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ str(closure,
         ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  // Save closure before the write barrier.
  __ mov(r5, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, closure, r0,
                            kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ mov(closure, r5);
  __ pop(new_target);
  __ pop(argument_count);
  __ Jump(entry);

  __ bind(&loop_bottom);
  __ sub(index, index, Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
  __ cmp(index, Operand(Smi::FromInt(1)));
  __ b(gt, &loop_top);

  // We found neither literals nor code.
  __ jmp(&gotta_call_runtime);

  __ bind(&maybe_call_runtime);
  __ pop(closure);

  // Last possibility. Check the context-free optimized code map entry.
  __ ldr(entry, FieldMemOperand(map, FixedArray::kHeaderSize +
                                         SharedFunctionInfo::kSharedCodeIndex));
  __ ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Store code entry in the closure.
  __ add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(&install_optimized_code_and_tailcall);

  __ bind(&try_shared);
  __ pop(new_target);
  __ pop(argument_count);
  // Is the full code valid?
  __ ldr(entry,
         FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ ldr(r5, FieldMemOperand(entry, Code::kFlagsOffset));
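  // Extract the code kind from the flags word; if the shared code is still a
  // builtin (presumably the lazy-compile stub), there is no real code to
  // install yet and the runtime must be called.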
  __ and_(r5, r5, Operand(Code::KindField::kMask));
  __ mov(r5, Operand(r5, LSR, Code::KindField::kShift));
  __ cmp(r5, Operand(Code::BUILTIN));
  __ b(eq, &gotta_call_runtime_no_stack);
  // Yes, install the full code.
  __ add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, r5);
  __ Jump(entry);

  __ bind(&gotta_call_runtime);
  __ pop(closure);
  __ pop(new_target);
  __ pop(argument_count);
  __ bind(&gotta_call_runtime_no_stack);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r0 - contains return address (beginning of patch sequence)
  //   r1 - isolate
  //   r3 - new target
  FrameScope scope(masm, StackFrame::MANUAL);
  __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
  __ PrepareCallCFunction(2, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
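  // r0 still points at the start of the patch sequence, which the C function
  // above rewrote back into a young-code prologue, so jumping there re-enters
  // the function as young code.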
  __ mov(pc, r0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
    MacroAssembler* masm) {                                 \
  GenerateMakeCodeYoungAgainCommon(masm);                   \
}                                                           \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
    MacroAssembler* masm) {                                 \
  GenerateMakeCodeYoungAgainCommon(masm);                   \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r0 - contains return address (beginning of patch sequence)
  //   r1 - isolate
  //   r3 - new target
  FrameScope scope(masm, StackFrame::MANUAL);
  __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
  __ PrepareCallCFunction(2, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(ExternalReference::get_mark_code_as_executed_function(
      masm->isolate()), 2);
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());

  // Perform prologue operations usually performed by the young code stub.
  __ PushStandardFrame(r1);

  // Jump to point after the code-age stub.
  __ add(r0, r0, Operand(kNoCodeAgeSequenceLength));
  __ mov(pc, r0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across the notification; this is important for
    // compiled stubs that tail call the runtime on deopts, passing their
    // parameters in registers.
    __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
    // Notify the runtime system of the stub failure.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
  }

  __ add(sp, sp, Operand(kPointerSize));  // Ignore state
  __ mov(pc, lr);  // Jump to miss handler
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(r0);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> r6.
  __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r6);
  // Switch on the state.
  Label with_tos_register, unknown_state;
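  // NO_REGISTERS means only the state was pushed; TOS_REGISTER means the
  // value of the top-of-stack (accumulator) register was pushed as well and
  // has to be restored into r0 before returning.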
  __ cmp(r6,
         Operand(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS)));
  __ b(ne, &with_tos_register);
  __ add(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r0.code());
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
  __ cmp(r6,
         Operand(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));
  __ b(ne, &unknown_state);
  __ add(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Register scratch0, Register scratch1,
                                    Register scratch2,
                                    Label* receiver_check_failed) {
  Register signature = scratch0;
  Register map = scratch1;
  Register constructor = scratch2;

  // If there is no signature, return the holder.
  __ ldr(signature, FieldMemOperand(function_template_info,
                                    FunctionTemplateInfo::kSignatureOffset));
  __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
  Label receiver_check_passed;
  __ b(eq, &receiver_check_passed);

  // Walk the prototype chain.
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, ip, ip);
  __ cmp(ip, Operand(JS_FUNCTION_TYPE));
  Label next_prototype;
  __ b(ne, &next_prototype);
  Register type = constructor;
  __ ldr(type,
         FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ cmp(signature, type);
  __ b(eq, &receiver_check_passed);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ CompareObjectType(type, ip, ip, FUNCTION_TEMPLATE_INFO_TYPE);
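  // Note: the load and branch below are predicated on eq, so they execute
  // only when the object really is a FunctionTemplateInfo; otherwise control
  // falls through to next_prototype.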

  // Otherwise load the parent function template and iterate.
  __ ldr(type,
         FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset),
         eq);
  __ b(&function_template_loop, eq);

  // Load the next prototype.
  __ bind(&next_prototype);
  __ ldr(ip, FieldMemOperand(map, Map::kBitField3Offset));
  __ tst(ip, Operand(Map::HasHiddenPrototype::kMask));
  __ b(eq, receiver_check_failed);
  __ ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ b(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}


void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : callee
  //  -- lr                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  CompatibleReceiverCheck(masm, r2, r3, r4, r5, r6, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ ldr(r4, FieldMemOperand(r3, FunctionTemplateInfo::kCallCodeOffset));
  __ ldr(r4, FieldMemOperand(r4, CallHandlerInfo::kFastHandlerOffset));
  __ add(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r4);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver)
  __ add(r0, r0, Operand(1));
  __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Look up the function in the JavaScript frame.
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the result is a Smi (no code object), just return to the unoptimized
  // code.
  Label skip;
  __ cmp(r0, Operand(Smi::FromInt(0)));
  __ b(ne, &skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));

  { ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start

    if (FLAG_enable_embedded_constant_pool) {
      __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r0);
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ ldr(r1, FieldMemOperand(
                   r1, FixedArray::OffsetOfElementAt(
                           DeoptimizationInputData::kOsrPcOffsetIndex)));

    // Compute the target address = code start + osr_offset.
    __ add(lr, r0, Operand::SmiUntag(r1));

    // And "return" to the OSR entry point of the function.
    __ Ret();
  }
}


// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : receiver
  // -----------------------------------

  // 1. Pop receiver into r0 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(r0);
    __ JumpIfSmi(r0, &receiver_not_date);
    __ CompareObjectType(r0, r1, r2, JS_DATE_TYPE);
    __ b(ne, &receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    __ ldr(r0, FieldMemOperand(r0, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      Label stamp_mismatch;
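      // Cached date fields are valid only while the stamp stored in the
      // JSDate object matches the isolate's date cache stamp; on mismatch
      // the C++ accessor below recomputes them.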
      __ mov(r1, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
      __ ldr(r1, MemOperand(r1));
      __ ldr(ip, FieldMemOperand(r0, JSDate::kCacheStampOffset));
      __ cmp(r1, ip);
      __ b(ne, &stamp_mismatch);
      __ ldr(r0, FieldMemOperand(
                     r0, JSDate::kValueOffset + field_index * kPointerSize));
      __ Ret();
      __ bind(&stamp_mismatch);
    }
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, r1);
    __ mov(r1, Operand(Smi::FromInt(field_index)));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  __ TailCallRuntime(Runtime::kThrowNotDateError);
}

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into r1, argArray into r0 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ mov(r3, r2);
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));  // receiver
    __ sub(r4, r0, Operand(1), SetCC);
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // thisArg
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argArray
    __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(sp, 0));
    __ mov(r0, r3);
  }
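  // Note: thisArg and argArray start out as undefined and the ge-predicated
  // loads above only overwrite them when enough arguments were passed, so no
  // branches are needed. For example, for "f.apply(t, [x, y])" the stack held
  // [argArray, thisArg, receiver]; afterwards r1 = f, r0 = [x, y], and only
  // t remains on the stack.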

  // ----------- S t a t e -------------
  //  -- r0    : argArray
  //  -- r1    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(r1, &receiver_not_callable);
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsCallable));
  __ b(eq, &receiver_not_callable);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(r0, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(r0, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ mov(r0, Operand(0));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ str(r1, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}


// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  {
    Label done;
    __ cmp(r0, Operand::Zero());
    __ b(ne, &done);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  // r0: actual number of arguments
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));

  // 3. Shift arguments one slot down on the stack (overwriting the original
  //    receiver). Adjust argument count to make the original first argument
  //    the new receiver.
  // r0: actual number of arguments
  // r1: callable
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));

    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, -kPointerSize));
    __ str(ip, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
    __ cmp(r2, sp);
    __ b(ne, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r1 (if present), argumentsList into r0 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ mov(r2, r1);
    __ mov(r3, r1);
    __ sub(r4, r0, Operand(1), SetCC);
    __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // target
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // thisArgument
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argumentsList
    __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(sp, 0));
    __ mov(r0, r3);
  }

  // ----------- S t a t e -------------
  //  -- r0    : argumentsList
  //  -- r1    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(r1, &target_not_callable);
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsCallable));
  __ b(eq, &target_not_callable);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ str(r1, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r1 (if present), argumentsList into r0 (if present),
  // new.target into r3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push undefined as
  // the receiver instead.
  {
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ mov(r2, r1);
    __ str(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));  // receiver
    __ sub(r4, r0, Operand(1), SetCC);
    __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // target
    __ mov(r3, r1);  // new.target defaults to target
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argumentsList
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // new.target
    __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ mov(r0, r2);
  }

  // ----------- S t a t e -------------
  //  -- r0    : argumentsList
  //  -- r3    : new.target
  //  -- r1    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(r1, &target_not_constructor);
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsConstructor));
  __ b(eq, &target_not_constructor);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(r3, &new_target_not_constructor);
  __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsConstructor));
  __ b(eq, &new_target_not_constructor);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ str(r1, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ str(r3, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  //  -- r3 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  __ cmp(r5, Operand(r2, LSL, kPointerSizeLog2));
  __ b(le, stack_overflow);  // Signed comparison.
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
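  // Build the arguments adaptor frame: push fp and lr (plus pp when embedded
  // constant pools are enabled) together with the ARGUMENTS_ADAPTOR frame
  // marker, the function, and the smi-tagged argument count, then point fp
  // into the new frame.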
  __ SmiTag(r0);
  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
                       (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                       fp.bit() | lr.bit());
  __ add(fp, sp,
         Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                              kPointerSize)));

  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR);
  __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
  __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
}


// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : argumentsList
  //  -- r1    : target
  //  -- r3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(r0, &create_runtime);

    // Load the map of argumentsList into r2.
    __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));

    // Load native context into r4.
    __ ldr(r4, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ ldr(ip, ContextMemOperand(r4, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r2);
    __ b(eq, &create_arguments);
    __ ldr(ip, ContextMemOperand(r4, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r2);
    __ b(eq, &create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(r2, ip, JS_ARRAY_TYPE);
    __ b(eq, &create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r1, r3, r0);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(r1, r3);
      __ ldr(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));
      __ SmiUntag(r2);
    }
    __ jmp(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ ldr(r2, FieldMemOperand(r0, JSArgumentsObject::kLengthOffset));
    __ ldr(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
    __ ldr(ip, FieldMemOperand(r4, FixedArray::kLengthOffset));
    __ cmp(r2, ip);
    __ b(ne, &create_runtime);
    __ SmiUntag(r2);
    __ mov(r0, r4);
    __ b(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ ldr(r2, FieldMemOperand(r2, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(r2);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ cmp(r2, Operand(FAST_ELEMENTS));
    __ b(hi, &create_runtime);
    __ cmp(r2, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ b(eq, &create_runtime);
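    // Only packed smi (FAST_SMI_ELEMENTS) and packed object (FAST_ELEMENTS)
    // arrays are handled here; holey smi arrays and anything more general
    // take the runtime path above.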
    __ ldr(r2, FieldMemOperand(r0, JSArray::kLengthOffset));
    __ ldr(r0, FieldMemOperand(r0, JSArray::kElementsOffset));
    __ SmiUntag(r2);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
    // Make ip the space we have left. The stack might already be overflowed
    // here which will cause ip to become negative.
    __ sub(ip, sp, ip);
    // Check if the arguments will overflow the stack.
    __ cmp(ip, Operand(r2, LSL, kPointerSizeLog2));
    __ b(gt, &done);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- r1    : target
  //  -- r0    : args (a FixedArray built from argumentsList)
  //  -- r2    : len (number of elements to push from args)
  //  -- r3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    __ mov(r4, Operand(0));
    Label done, loop;
    __ bind(&loop);
    __ cmp(r4, r2);
    __ b(eq, &done);
    __ add(ip, r0, Operand(r4, LSL, kPointerSizeLog2));
    __ ldr(ip, FieldMemOperand(ip, FixedArray::kHeaderSize));
    __ Push(ip);
    __ add(r4, r4, Operand(1));
    __ b(&loop);
    __ bind(&done);
    __ Move(r0, r4);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}

namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if tail call elimination is currently disabled (for example,
// while the debugger is active).
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
  __ ldrb(scratch1, MemOperand(scratch1));
  __ cmp(scratch1, Operand(0));
  __ b(eq, &done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ ldr(scratch3,
           MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ cmp(scratch3, Operand(Smi::FromInt(StackFrame::STUB)));
    __ b(ne, &no_interpreter_frame);
    __ ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(scratch3,
         MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ cmp(scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(ne, &no_arguments_adaptor);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mov(fp, scratch2);
  __ ldr(caller_args_count_reg,
         MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ b(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count
  __ ldr(scratch1,
         MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  __ ldr(scratch1,
         FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(caller_args_count_reg,
         FieldMemOperand(scratch1,
                         SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(caller_args_count_reg);

  __ bind(&formal_parameter_count_loaded);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(r1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kFunctionKindByteOffset));
  __ tst(r3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ b(ne, &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kNativeByteOffset));
  __ tst(r3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                     (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ b(ne, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- r0 : the number of arguments (not including the receiver)
    //  -- r1 : the function to call (checked to be a JSFunction)
    //  -- r2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r3);
    } else {
      Label convert_to_object, convert_receiver;
      __ ldr(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
      __ JumpIfSmi(r3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
      __ b(hs, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(r3, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(r3);
        }
        __ b(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the
        // frame in the fast case? (fall back to AllocateInNewSpace?)
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(r0);
        __ Push(r0, r1);
        __ mov(r0, r3);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ mov(r3, r0);
        __ Pop(r0, r1);
        __ SmiUntag(r0);
      }
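      // The ToObject call above may have clobbered r2, so reload the shared
      // function info before the formal parameter count is read from it
      // below.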
      __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ str(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSFunction)
  //  -- r2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r0, r3, r4, r5);
  }

  __ ldr(r2,
         FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(r2);
  ParameterCount actual(r0);
  ParameterCount expected(r2);
  __ InvokeFunctionCode(r1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ push(r1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}


namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : target (checked to be a JSBoundFunction)
  //  -- r3 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into r2 and length of that into r4.
  Label no_bound_arguments;
  __ ldr(r2, FieldMemOperand(r1, JSBoundFunction::kBoundArgumentsOffset));
  __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
  __ SmiUntag(r4);
  __ cmp(r4, Operand(0));
  __ b(eq, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- r0 : the number of arguments (not including the receiver)
    //  -- r1 : target (checked to be a JSBoundFunction)
    //  -- r2 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- r3 : new.target (only in case of [[Construct]])
    //  -- r4 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ sub(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
      __ b(gt, &done);  // Signed comparison.
      // Restore the stack pointer.
      __ add(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Relocate arguments down the stack.
    {
      Label loop, done_loop;
      __ mov(r5, Operand(0));
      __ bind(&loop);
      __ cmp(r5, r0);
      __ b(gt, &done_loop);
      __ ldr(ip, MemOperand(sp, r4, LSL, kPointerSizeLog2));
      __ str(ip, MemOperand(sp, r5, LSL, kPointerSizeLog2));
      __ add(r4, r4, Operand(1));
      __ add(r5, r5, Operand(1));
      __ b(&loop);
      __ bind(&done_loop);
    }
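    // The loop above moved the receiver and the existing arguments down into
    // the freshly reserved slots; the gap this opens up above them is now
    // filled with the bound arguments, copied from last to first.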

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
      __ SmiUntag(r4);
      __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ bind(&loop);
      __ sub(r4, r4, Operand(1), SetCC);
      __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2));
      __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));
      __ add(r0, r0, Operand(1));
      __ b(gt, &loop);
    }
  }
  __ bind(&no_bound_arguments);
}

}  // namespace


// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(r1);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r0, r3, r4, r5);
  }

  // Patch the receiver to [[BoundThis]].
  __ ldr(ip, FieldMemOperand(r1, JSBoundFunction::kBoundThisOffset));
  __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                       masm->isolate())));
  __ ldr(ip, MemOperand(ip));
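  // ip now holds the Call builtin's code object; the indirection through the
  // isolate is presumably used because that builtin may not have been
  // generated yet when this one is built. Skip the Code header to reach its
  // first instruction.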
  __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
}


// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(r1, &non_callable);
  __ bind(&non_smi);
  __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
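  // Note: CompareObjectType above left the target's map in r4 and its
  // instance type in r5, so the checks below can reuse them without
  // reloading.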

  // Check if target has a [[Call]] internal method.
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsCallable));
  __ b(eq, &non_callable);

  __ cmp(r5, Operand(JS_PROXY_TYPE));
  __ b(ne, &non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r0, r3, r4, r5);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(r1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ add(r0, r0, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}


// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (checked to be a JSFunction)
  //  -- r3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(r1);

  // The calling convention for function-specific ConstructStubs requires
  // r2 to contain either an AllocationSite or undefined.
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset));
  __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
}


// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSBoundFunction)
  //  -- r3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(r1);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  __ cmp(r1, r3);
  __ ldr(r3, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset),
         eq);

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ ldr(ip, MemOperand(ip));
  __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
}


// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (checked to be a JSProxy)
  //  -- r3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(r1);
  __ Push(r3);
  // Include the pushed new_target, constructor and the receiver.
  __ add(r0, r0, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}


// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (can be any Object)
  //  -- r3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(r1, &non_constructor);

  // Dispatch based on instance type.
  __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  __ ldrb(r2, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r2, Operand(1 << Map::kIsConstructor));
  __ b(eq, &non_constructor);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ cmp(r5, Operand(JS_PROXY_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
2595
Ben Murdochc5610432016-08-08 18:44:38 +01002596// static
2597void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2598 // ----------- S t a t e -------------
2599 // -- r1 : requested object size (untagged)
2600 // -- lr : return address
2601 // -----------------------------------
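  // Runtime arguments must be tagged, so the raw size is smi-tagged first;
  // cp is set to Smi zero as a dummy context, since this builtin can be
  // reached without a valid one.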
  __ SmiTag(r1);
  __ Push(r1);
  __ Move(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r1 : requested object size (untagged)
  // -- lr : return address
  // -----------------------------------
  __ SmiTag(r1);
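  // The target space is passed to the runtime as a second, smi-encoded flag.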
  __ Move(r2, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(r1, r2);
  __ Move(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : actual number of arguments
  // -- r1 : function (passed through to callee)
  // -- r2 : expected number of arguments
  // -- r3 : new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ cmp(r0, r2);
  __ b(lt, &too_few);
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);
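  // Functions marked with the sentinel tolerate any actual argument count,
  // so the adaptor frame is skipped entirely for them.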

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r0 and copy end address into r4.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
    // Adjust for return address and receiver.
    __ add(r0, r0, Operand(2 * kPointerSize));
    __ sub(r4, r0, Operand(r2, LSL, kPointerSizeLog2));
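    // The loop below copies r2 + 1 slots (receiver plus the expected
    // arguments) downward from r0 to r4; any surplus actual arguments are
    // simply not copied into the new frame.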

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    // r4: copy end address

    Label copy;
    __ bind(&copy);
    __ ldr(ip, MemOperand(r0, 0));
    __ push(ip);
    __ cmp(r0, r4);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    __ b(&invoke);
  }

  {  // Too few parameters: actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r0; the copy end address is fp.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
    __ push(ip);
    __ cmp(r0, fp);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    // Fill the remaining expected arguments with undefined.
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ sub(r4, fp, Operand(r2, LSL, kPointerSizeLog2));
    // Adjust for frame.
    __ sub(r4, r4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                           2 * kPointerSize));
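    // r4 is where sp must end up once every missing argument is filled in:
    // kFixedFrameSizeFromFp skips the adaptor frame's fixed slots, and the
    // extra 2 * kPointerSize accounts for the pushed argument count and the
    // receiver.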

    Label fill;
    __ bind(&fill);
    __ push(ip);
    __ cmp(sp, r4);
    __ b(ne, &fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mov(r0, r2);
  // r0 : expected number of arguments
  // r1 : function (passed through to callee)
  // r3 : new target (passed through to callee)
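  // The code entry slot holds a raw entry address rather than a tagged Code
  // object, so it can be called directly with no header adjustment (contrast
  // the Construct tail call further up).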
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ Call(r4);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
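  // The deoptimizer uses this recorded offset as the return address when it
  // materializes an arguments adaptor frame.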

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
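  // No adaptor frame is built here: the callee runs on the caller's argument
  // layout, with the actual count still in r0.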
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ Jump(r4);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM