blob: 1fffcb67e596dd76147c9b4808358b491c90722e [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Steve Blocka7e24c12009-10-30 11:49:00 +00004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_ARM
Leon Clarkef7060e22010-06-03 12:02:55 +01006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#include "src/codegen.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008#include "src/debug/debug.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00009#include "src/deoptimizer.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010#include "src/full-codegen/full-codegen.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040011#include "src/runtime/runtime.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000012
13namespace v8 {
14namespace internal {
15
16
17#define __ ACCESS_MASM(masm)
18
19
// Trampoline from a JS call into a C++ builtin: pushes any requested extra
// arguments (target and/or new.target), adjusts the argument count in r0,
// and tail-jumps out to the external C function identified by |id|.
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : target
  //  -- r3                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(r1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Insert extra arguments. The ones pushed here become visible to the C++
  // builtin as additional leading arguments.
  int num_extra_args = 0;
  switch (extra_args) {
    case BuiltinExtraArguments::kTarget:
      __ Push(r1);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kNewTarget:
      __ Push(r3);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kTargetAndNewTarget:
      __ Push(r1, r3);
      num_extra_args += 2;
      break;
    case BuiltinExtraArguments::kNone:
      break;
  }

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(r0, r0, Operand(num_extra_args + 1));

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
65
66
// Load the built-in InternalArray function from the current context into
// |result|.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}
73
74
// Load the built-in Array function from the current context into |result|.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}
80
81
// Entry point for the InternalArray constructor called as a normal function:
// verifies (in debug builds) that the function has a proper initial map, then
// tail-calls the InternalArrayConstructorStub.
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    // A smi here would mean the slot holds no map at all.
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
108
109
// Entry point for the Array constructor called as a normal function: verifies
// (in debug builds) that the function has a proper initial map, then
// tail-calls the ArrayConstructorStub with r3 = target and r2 = undefined
// (no AllocationSite).
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    // A smi here would mean the slot holds no map at all.
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // new.target is the same as the function itself here.
  __ mov(r3, r1);
  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
137
138
// static
// Implements Math.max / Math.min over the actual arguments: keeps a running
// accumulator (tagged in r1, double in d1), converts each argument to a
// number on demand, and handles the -0 vs. +0 and NaN cases explicitly.
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments
  //  -- lr                 : return address
  //  -- sp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- sp[(argc + 1) * 8] : receiver
  // -----------------------------------
  // cc_done: accumulator already wins; cc_swap: parameter wins.
  Condition const cc_done = (kind == MathMaxMinKind::kMin) ? mi : gt;
  Condition const cc_swap = (kind == MathMaxMinKind::kMin) ? gt : mi;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r1 and the double value in d1.
  __ LoadRoot(r1, root_index);
  __ vldr(d1, FieldMemOperand(r1, HeapNumber::kValueOffset));

  // Remember how many slots to drop (including the receiver).
  __ add(r4, r0, Operand(1));

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lt, &done_loop);

    // Load the next parameter tagged value into r2.
    __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumberStub if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r2, &convert_smi);
    __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
    __ JumpIfRoot(r3, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumberStub to convert it.
      // The loop counter (r0), accumulator (r1) and drop count (r4) must
      // survive the call, so smi-tag and spill them around it.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r0);
      __ SmiTag(r4);
      __ Push(r0, r1, r4);
      __ mov(r0, r2);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(r2, r0);
      __ Pop(r0, r1, r4);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
        __ SmiToDouble(d1, r1);
        __ JumpIfSmi(r1, &done_restore);
        __ vldr(d1, FieldMemOperand(r1, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
      __ SmiUntag(r4);
      __ SmiUntag(r0);
    }
    // Re-dispatch on the (now numeric) converted value.
    __ b(&convert);
    __ bind(&convert_number);
    __ vldr(d2, FieldMemOperand(r2, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r2);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left hand
    // side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ VFPCompareAndSetFlags(d1, d2);
    __ b(cc_done, &loop);
    __ b(cc_swap, &compare_swap);
    __ b(vs, &compare_nan);

    // Left and right hand side are equal, check for -0 vs. +0.
    // Inspect the sign bit of the relevant operand's upper word.
    __ VmovHigh(ip, reg);
    __ cmp(ip, Operand(0x80000000));
    __ b(ne, &loop);

    // Result is on the right hand side.
    __ bind(&compare_swap);
    __ vmov(d1, d2);
    __ mov(r1, r2);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(r1, Heap::kNanValueRootIndex);
    __ vldr(d1, FieldMemOperand(r1, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  __ mov(r0, r1);
  __ Drop(r4);
  __ Ret();
}
240
// static
// Number(value) called as a plain function: converts the first argument to a
// number (tail call into ToNumberStub), or returns +0 when called with no
// arguments.
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    // PreIndex leaves sp pointing at the first argument's slot.
    __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
    __ Drop(2);
  }

  // 2a. Convert the first argument to a number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(r0, Smi::FromInt(0));
  __ Ret(1);
}
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800270
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000271
// static
// new Number(value): converts the first argument to a number and wraps it in
// a JSValue. Falls back to the runtime (FastNewObjectStub) when new.target
// differs from the constructor or inline allocation fails.
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- r3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // 2. Load the first argument into r2 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    // Default value is +0 when no argument was passed.
    __ Move(r2, Smi::FromInt(0));
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r2 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r2, &done_convert);
    __ CompareObjectType(r2, r4, r4, HEAP_NUMBER_TYPE);
    __ b(eq, &done_convert);
    {
      // Preserve function and new target across the ToNumber call.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r1, r3);
      __ Move(r0, r2);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Move(r2, r0);
      __ Pop(r1, r3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(r1, r3);
  __ b(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r0, r1, r2, r4, r5, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r2);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r2);
  }
  // Store the wrapped number into the freshly created JSValue.
  __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));
  __ Ret();
}
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -0800340
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000341
// static
// String(value) called as a plain function: returns the argument unchanged if
// it is already a string, converts symbols to their descriptive string, and
// converts anything else via ToStringStub. No arguments yields "".
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
    __ Drop(2);
  }

  // 2a. At least one argument, return r0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r0, &to_string);
    // The eq branch below relies on SYMBOL_TYPE being the first non-string
    // instance type.
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE);
    __ b(hi, &to_string);
    __ b(eq, &symbol_descriptive_string);
    __ Ret();
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r0, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r0 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in r0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(r0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}
395
396
// static
// new String(value): converts the first argument to a string and wraps it in
// a JSValue. Falls back to the runtime (FastNewObjectStub) when new.target
// differs from the constructor or inline allocation fails.
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- r3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // 2. Load the first argument into r2 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    // Default value is the empty string when no argument was passed.
    __ LoadRoot(r2, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r2 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r2, &convert);
    __ CompareObjectType(r2, r4, r4, FIRST_NONSTRING_TYPE);
    __ b(lo, &done_convert);
    __ bind(&convert);
    {
      // Preserve function and new target across the ToString call.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(r1, r3);
      __ Move(r0, r2);
      __ CallStub(&stub);
      __ Move(r2, r0);
      __ Pop(r1, r3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(r1, r3);
  __ b(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r0, r1, r2, r4, r5, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r2);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r2);
  }
  // Store the wrapped string into the freshly created JSValue.
  __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));
  __ Ret();
}
466
467
// Tail-calls the code attached to the SharedFunctionInfo of the function in
// r1. Clobbers r2.
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
  // Skip the Code object header to reach the first instruction.
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}
474
// Calls the runtime function |function_id| (which is expected to return a
// Code object) while preserving r0/r1/r3 for the callee, then tail-calls the
// returned code.
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r0 : argument count (preserved for callee)
  //  -- r1 : target function (preserved for callee)
  //  -- r3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ SmiTag(r0);
    __ push(r0);
    // Push a copy of the target function and the new target.
    __ push(r1);
    __ push(r3);
    // Push function as parameter to the runtime call.
    __ Push(r1);

    __ CallRuntime(function_id, 1);
    // Stash the returned Code object before restoring the saved registers.
    __ mov(r2, r0);

    // Restore target function and new target.
    __ pop(r3);
    __ pop(r1);
    __ pop(r0);
    __ SmiUntag(r0, r0);
  }
  // Skip the Code object header to reach the first instruction.
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}
505
506
// Entry for functions whose optimized code is still being compiled: if an
// interrupt is pending (stack limit hit), try to install the optimized code;
// otherwise keep running the unoptimized shared code.
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
523
524
// Shared body of the [[Construct]] stubs. Sets up a construct frame,
// optionally allocates the implicit receiver, copies the arguments, invokes
// the constructor, and applies the ECMA rules for selecting the construct
// result. The three bool flags select the variant (see the wrappers below).
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- r2     : allocation site or undefined
  //  -- r3     : new target
  //  -- cp     : context
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r2, r4);
    __ Push(cp);
    __ SmiTag(r0);
    __ Push(r2, r0);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(r1, r3);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(r4, r0);
      __ Pop(r1, r3);

      // ----------- S t a t e -------------
      //  -- r1: constructor function
      //  -- r3: new target
      //  -- r4: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ ldr(r0, MemOperand(sp));
    }

    __ SmiUntag(r0);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ push(r4);
      __ push(r4);
    } else {
      // Derived/builtin constructors get the hole as their receiver slot.
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r0: number of arguments
    // r1: constructor function
    // r2: address of last argument (caller sp)
    // r3: new target
    // r4: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ SmiTag(r4, r0);
    __ b(&entry);
    __ bind(&loop);
    // r4 is a smi, so shifting by (kPointerSizeLog2 - 1) scales it to bytes.
    __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2 - 1));
    __ push(ip);
    __ bind(&entry);
    // Decrement by 2 because r4 is smi-tagged (one untagged argument).
    __ sub(r4, r4, Operand(2), SetCC);
    __ b(ge, &loop);

    // Call the function.
    // r0: number of arguments
    // r1: constructor function
    // r3: new target
    if (is_api_function) {
      __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, r3, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r0: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r0: result
      // sp[0]: receiver
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(r0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r0, r1, r3, FIRST_JS_RECEIVER_TYPE);
      __ b(ge, &exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ ldr(r0, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ ldr(r1, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  // Drop the smi-tagged argument count (r1) worth of slots plus the receiver.
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  }
  __ Jump(lr);
}
685
686
// Generic construct stub: ordinary function, implicit receiver created.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}
690
691
// Construct stub for API functions: no implicit receiver is created here.
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}
695
696
// Construct stub for builtins: no implicit receiver, no derived-class check.
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}
700
701
// Construct stub for derived-class constructors: no implicit receiver, and
// the result is checked per ES6 9.2.2 step 13+.
void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}
706
707
// Construct entry for non-constructable targets: throws a TypeError via the
// runtime (the runtime call does not return).
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ push(r1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
713
714
// Whether the argument count register holds a smi or a plain integer.
enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers r2; preserves all other registers.
// Throws a stack-overflow error through the runtime if pushing |argc|
// arguments would cross the real stack limit; otherwise falls through.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
  // Make r2 the space we have left. The stack might already be overflowed
  // here which will cause r2 to become negative.
  __ sub(r2, sp, r2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ cmp(r2, Operand::PointerOffsetFromSmiKey(argc));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ cmp(r2, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ b(gt, &okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
743
744
// Shared body of the JS entry trampolines. Installs the caller context from
// the isolate, pushes the function/receiver and the C-side argument vector
// onto the stack, clears the JS callee-saved registers for the GC, and then
// tail-dispatches to either the Call or the Construct builtin.
Steve Blocka7e24c12009-10-30 11:49:00 +0000745static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
746 bool is_construct) {
747 // Called from Generate_JS_Entry
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000748 // r0: new.target
Steve Blocka7e24c12009-10-30 11:49:00 +0000749 // r1: function
750 // r2: receiver
751 // r3: argc
752 // r4: argv
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000753 // r5-r6, r8 (if !FLAG_enable_embedded_constant_pool) and cp may be clobbered
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000754 ProfileEntryHookStub::MaybeCallEntryHook(masm);
Steve Blocka7e24c12009-10-30 11:49:00 +0000755
Steve Blocka7e24c12009-10-30 11:49:00 +0000756 // Enter an internal frame.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100757 {
758 FrameScope scope(masm, StackFrame::INTERNAL);
Steve Blocka7e24c12009-10-30 11:49:00 +0000759
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000760 // Setup the context (we need to use the caller context from the isolate).
761 ExternalReference context_address(Isolate::kContextAddress,
762 masm->isolate());
763 __ mov(cp, Operand(context_address));
764 __ ldr(cp, MemOperand(cp));
Steve Blocka7e24c12009-10-30 11:49:00 +0000765
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100766 __ InitializeRootRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +0000767
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100768 // Push the function and the receiver onto the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000769 __ Push(r1, r2);
770
771 // Check if we have enough stack space to push all arguments.
772 // Clobbers r2.
773 Generate_CheckStackOverflow(masm, r3, kArgcIsUntaggedInt);
774
775 // Remember new.target.
// r5 is free here; it is reloaded into r3 after the argument-copy loop.
776 __ mov(r5, r0);
Steve Blocka7e24c12009-10-30 11:49:00 +0000777
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100778 // Copy arguments to the stack in a loop.
779 // r1: function
780 // r3: argc
781 // r4: argv, i.e. points to first arg
782 Label loop, entry;
783 __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
784 // r2 points past last arg.
785 __ b(&entry);
786 __ bind(&loop);
787 __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex)); // read next parameter
788 __ ldr(r0, MemOperand(r0)); // dereference handle
789 __ push(r0); // push parameter
790 __ bind(&entry);
791 __ cmp(r4, r2);
792 __ b(ne, &loop);
Steve Blocka7e24c12009-10-30 11:49:00 +0000793
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000794 // Setup new.target and argc.
795 __ mov(r0, Operand(r3));
796 __ mov(r3, Operand(r5));
797
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100798 // Initialize all JavaScript callee-saved registers, since they will be seen
799 // by the garbage collector as part of handlers.
800 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
801 __ mov(r5, Operand(r4));
802 __ mov(r6, Operand(r4));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000803 if (!FLAG_enable_embedded_constant_pool) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000804 __ mov(r8, Operand(r4));
805 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100806 if (kR9Available == 1) {
807 __ mov(r9, Operand(r4));
808 }
809
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000810 // Invoke the code.
811 Handle<Code> builtin = is_construct
812 ? masm->isolate()->builtins()->Construct()
813 : masm->isolate()->builtins()->Call();
814 __ Call(builtin, RelocInfo::CODE_TARGET);
815
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100816 // Exit the JS frame and remove the parameters (except function), and
817 // return.
818 // Respect ABI stack constraint.
Steve Blocka7e24c12009-10-30 11:49:00 +0000819 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000820 __ Jump(lr);
821
822 // r0: result
823}
824
825
826void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
827 Generate_JSEntryTrampolineHelper(masm, false);
828}
829
830
831void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
832 Generate_JSEntryTrampolineHelper(masm, true);
833}
834
835
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000836// Generate code for entering a JS function with the interpreter.
837// On entry to the function the receiver and arguments have been pushed on the
838// stack left to right. The actual argument count matches the formal parameter
839// count expected by the function.
840//
841// The live registers are:
842// o r1: the JS function object being called.
843// o r3: the new target
844// o cp: our context
845// o pp: the caller's constant pool pointer (if enabled)
846// o fp: the caller's frame pointer
847// o sp: stack pointer
848// o lr: return address
849//
Ben Murdoch097c5b22016-05-18 11:27:45 +0100850// The function builds an interpreter frame. See InterpreterFrameConstants in
851// frames.h for its layout.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000852void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
853 // Open a frame scope to indicate that there is a frame on the stack. The
854 // MANUAL indicates that the scope shouldn't actually generate code to set up
855 // the frame (that is done below).
856 FrameScope frame_scope(masm, StackFrame::MANUAL);
Ben Murdochda12d292016-06-02 14:46:10 +0100857 __ PushStandardFrame(r1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000858
859 // Get the bytecode array from the function object and load the pointer to the
860 // first entry into kInterpreterBytecodeRegister.
861 __ ldr(r0, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100862 Register debug_info = kInterpreterBytecodeArrayRegister;
863 DCHECK(!debug_info.is(r0));
864 __ ldr(debug_info, FieldMemOperand(r0, SharedFunctionInfo::kDebugInfoOffset));
865 __ cmp(debug_info, Operand(DebugInfo::uninitialized()));
866 // Load original bytecode array or the debug copy.
// The two loads below are predicated (eq/ne) on the comparison above, so
// exactly one of them executes.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000867 __ ldr(kInterpreterBytecodeArrayRegister,
Ben Murdoch097c5b22016-05-18 11:27:45 +0100868 FieldMemOperand(r0, SharedFunctionInfo::kFunctionDataOffset), eq);
869 __ ldr(kInterpreterBytecodeArrayRegister,
870 FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex), ne);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000871
872 if (FLAG_debug_code) {
873 // Check function data field is actually a BytecodeArray object.
874 __ SmiTst(kInterpreterBytecodeArrayRegister);
875 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
876 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r0, no_reg,
877 BYTECODE_ARRAY_TYPE);
878 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
879 }
880
Ben Murdoch097c5b22016-05-18 11:27:45 +0100881 // Push new.target, bytecode array and zero for bytecode array offset.
882 __ mov(r0, Operand(0));
883 __ Push(r3, kInterpreterBytecodeArrayRegister, r0);
884
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000885 // Allocate the local and temporary register file on the stack.
886 {
887 // Load frame size from the BytecodeArray object.
888 __ ldr(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
889 BytecodeArray::kFrameSizeOffset));
890
891 // Do a stack check to ensure we don't go over the limit.
892 Label ok;
893 __ sub(r9, sp, Operand(r4));
894 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
895 __ cmp(r9, Operand(r2));
896 __ b(hs, &ok);
897 __ CallRuntime(Runtime::kThrowStackOverflow);
898 __ bind(&ok);
899
900 // If ok, push undefined as the initial value for all register file entries.
901 Label loop_header;
902 Label loop_check;
903 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
904 __ b(&loop_check, al);
905 __ bind(&loop_header);
906 // TODO(rmcilroy): Consider doing more than one push per loop iteration.
907 __ push(r9);
908 // Continue loop if not done.
909 __ bind(&loop_check);
910 __ sub(r4, r4, Operand(kPointerSize), SetCC);
911 __ b(&loop_header, ge);
912 }
913
914 // TODO(rmcilroy): List of things not currently dealt with here but done in
915 // fullcodegen's prologue:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000916 // - Call ProfileEntryHookStub when isolate has a function_entry_hook.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000917 // - Code aging of the BytecodeArray object.
918
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000919 // Load accumulator, register file, bytecode offset, dispatch table into
920 // registers.
921 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
922 __ add(kInterpreterRegisterFileRegister, fp,
923 Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
924 __ mov(kInterpreterBytecodeOffsetRegister,
925 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100926 __ mov(kInterpreterDispatchTableRegister,
927 Operand(ExternalReference::interpreter_dispatch_table_address(
928 masm->isolate())));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000929
930 // Dispatch to the first bytecode handler for the function.
931 __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
932 kInterpreterBytecodeOffsetRegister));
933 __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r1, LSL,
934 kPointerSizeLog2));
935 // TODO(rmcilroy): Make dispatch table point to code entrys to avoid untagging
936 // and header removal.
937 __ add(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
938 __ Call(ip);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100939
940 // Even though the first bytecode handler was called, we will never return.
941 __ Abort(kUnexpectedReturnFromBytecodeHandler);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000942}
943
944
// Leaves an interpreter frame and returns to the caller, dropping the
// receiver and arguments; the parameter count is read from the BytecodeArray.
// The interpreter's return value arrives in the accumulator register (r0).
945void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
946 // TODO(rmcilroy): List of things not currently dealt with here but done in
947 // fullcodegen's EmitReturnSequence.
948 // - Supporting FLAG_trace for Runtime::TraceExit.
949 // - Support profiler (specifically decrementing profiling_counter
950 // appropriately and calling out to HandleInterrupts if necessary).
951
952 // The return value is in accumulator, which is already in r0.
953
954 // Leave the frame (also dropping the register file).
955 __ LeaveFrame(StackFrame::JAVA_SCRIPT);
956
957 // Drop receiver + arguments and return.
// kParameterSizeOffset is a byte size, so it can be added to sp directly.
958 __ ldr(ip, FieldMemOperand(kInterpreterBytecodeArrayRegister,
959 BytecodeArray::kParameterSizeOffset));
960 __ add(sp, sp, ip, LeaveCC);
961 __ Jump(lr);
962}
963
964
// Pushes arguments onto the stack, walking memory downward from |index|
// until it reaches |limit| (exclusive). |index| is decremented in place;
// |scratch| is clobbered.
965static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
966 Register limit, Register scratch) {
967 Label loop_header, loop_check;
968 __ b(al, &loop_check);
969 __ bind(&loop_header);
970 __ ldr(scratch, MemOperand(index, -kPointerSize, PostIndex));
971 __ push(scratch);
972 __ bind(&loop_check);
973 __ cmp(index, limit);
974 __ b(gt, &loop_header);
975}
976
977
978// static
// Pushes the receiver plus r0 arguments (starting at the address in r2) onto
// the stack and tail-calls the generic Call builtin with the given
// receiver-conversion and tail-call modes.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100979void Builtins::Generate_InterpreterPushArgsAndCallImpl(
980 MacroAssembler* masm, TailCallMode tail_call_mode) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000981 // ----------- S t a t e -------------
982 // -- r0 : the number of arguments (not including the receiver)
983 // -- r2 : the address of the first argument to be pushed. Subsequent
984 // arguments should be consecutive above this, in the same order as
985 // they are to be pushed onto the stack.
986 // -- r1 : the target to call (can be any Object).
987 // -----------------------------------
988
989 // Find the address of the last argument.
990 __ add(r3, r0, Operand(1)); // Add one for receiver.
991 __ mov(r3, Operand(r3, LSL, kPointerSizeLog2));
992 __ sub(r3, r2, r3);
993
994 // Push the arguments.
995 Generate_InterpreterPushArgs(masm, r2, r3, r4);
996
997 // Call the target.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100998 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
999 tail_call_mode),
1000 RelocInfo::CODE_TARGET);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001001}
1002
1003
1004// static
// Pushes a hole-slot for the receiver plus r0 arguments (starting at the
// address in r2) and tail-calls the generic Construct builtin, leaving
// r0 (argc), r1 (constructor) and r3 (new target) as the builtin expects.
1005void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
1006 // ----------- S t a t e -------------
1007 // -- r0 : argument count (not including receiver)
1008 // -- r3 : new target
1009 // -- r1 : constructor to call
1010 // -- r2 : address of the first argument
1011 // -----------------------------------
1012
1013 // Find the address of the last argument.
1014 __ mov(r4, Operand(r0, LSL, kPointerSizeLog2));
1015 __ sub(r4, r2, r4);
1016
1017 // Push a slot for the receiver to be constructed.
1018 __ mov(ip, Operand::Zero());
1019 __ push(ip);
1020
1021 // Push the arguments.
1022 Generate_InterpreterPushArgs(masm, r2, r4, r5);
1023
1024 // Call the constructor with r0, r1, and r3 unmodified.
1025 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
1026}
1027
1028
// Re-enters bytecode dispatch inside an existing interpreter frame: restores
// the interpreter's fixed registers (register file, dispatch table, context,
// bytecode array, bytecode offset) from the frame and jumps to the handler
// for the current bytecode.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001029static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001030 // Initialize register file register and dispatch table register.
1031 __ add(kInterpreterRegisterFileRegister, fp,
1032 Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001033 __ mov(kInterpreterDispatchTableRegister,
1034 Operand(ExternalReference::interpreter_dispatch_table_address(
1035 masm->isolate())));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001036
1037 // Get the context from the frame.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001038 __ ldr(kContextRegister,
1039 MemOperand(kInterpreterRegisterFileRegister,
1040 InterpreterFrameConstants::kContextFromRegisterPointer));
1041
1042 // Get the bytecode array pointer from the frame.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001043 __ ldr(
1044 kInterpreterBytecodeArrayRegister,
1045 MemOperand(kInterpreterRegisterFileRegister,
1046 InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001047
1048 if (FLAG_debug_code) {
1049 // Check function data field is actually a BytecodeArray object.
1050 __ SmiTst(kInterpreterBytecodeArrayRegister);
1051 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1052 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r1, no_reg,
1053 BYTECODE_ARRAY_TYPE);
1054 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1055 }
1056
1057 // Get the target bytecode offset from the frame.
1058 __ ldr(kInterpreterBytecodeOffsetRegister,
1059 MemOperand(
1060 kInterpreterRegisterFileRegister,
1061 InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
1062 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1063
1064 // Dispatch to the target bytecode.
1065 __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
1066 kInterpreterBytecodeOffsetRegister));
1067 __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r1, LSL,
1068 kPointerSizeLog2));
// Skip the Code object header so ip points at the handler's first instruction.
1069 __ add(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
1070 __ mov(pc, ip);
1071}
1072
1073
// Notifies the runtime of a deoptimization of the given bailout type, then
// restores the accumulator from the stack and re-enters bytecode dispatch in
// the (now materialized) interpreter frame.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001074static void Generate_InterpreterNotifyDeoptimizedHelper(
1075 MacroAssembler* masm, Deoptimizer::BailoutType type) {
1076 // Enter an internal frame.
1077 {
1078 FrameScope scope(masm, StackFrame::INTERNAL);
1079
1080 // Pass the deoptimization type to the runtime system.
1081 __ mov(r1, Operand(Smi::FromInt(static_cast<int>(type))));
1082 __ push(r1);
1083 __ CallRuntime(Runtime::kNotifyDeoptimized);
1084 // Tear down internal frame.
1085 }
1086
1087 // Drop state (we don't use these for interpreter deopts) and pop the
1088 // accumulator value into the accumulator register.
1089 __ Drop(1);
1090 __ Pop(kInterpreterAccumulatorRegister);
1091
1092 // Enter the bytecode dispatch.
1093 Generate_EnterBytecodeDispatch(masm);
1094}
1095
1096
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001097void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
1098 Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1099}
1100
1101
1102void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
1103 Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1104}
1105
1106
1107void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
1108 Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1109}
1110
Ben Murdoch097c5b22016-05-18 11:27:45 +01001111void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1112 // Set the address of the interpreter entry trampoline as a return address.
1113 // This simulates the initial call to bytecode handlers in interpreter entry
1114 // trampoline. The return will never actually be taken, but our stack walker
1115 // uses this address to determine whether a frame is interpreted.
1116 __ Move(lr, masm->isolate()->builtins()->InterpreterEntryTrampoline());
1117
1118 Generate_EnterBytecodeDispatch(masm);
1119}
1120
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001121
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001122void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001123 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
Iain Merrick75681382010-08-19 15:07:18 +01001124}
1125
1126
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001127void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001128 GenerateTailCallToReturnedCode(masm,
1129 Runtime::kCompileOptimized_NotConcurrent);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001130}
1131
1132
1133void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001134 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001135}
1136
1137
// Shared body for all "make code young again" builtins: saves the registers
// the patched code still needs, calls the C function that rewrites the code
// age sequence, restores the registers, and resumes at the patch point (r0).
1138static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1139 // For now, we are relying on the fact that make_code_young doesn't do any
1140 // garbage collection which allows us to save/restore the registers without
1141 // worrying about which of them contain pointers. We also don't build an
1142 // internal frame to make the code faster, since we shouldn't have to do stack
1143 // crawls in MakeCodeYoung. This seems a bit fragile.
1144
1145 // The following registers must be saved and restored when calling through to
1146 // the runtime:
1147 // r0 - contains return address (beginning of patch sequence)
1148 // r1 - isolate
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001149 // r3 - new target
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001150 FrameScope scope(masm, StackFrame::MANUAL);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001151 __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001152 __ PrepareCallCFunction(2, 0, r2);
1153 __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
1154 __ CallCFunction(
1155 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001156 __ ldm(ia_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001157 __ mov(pc, r0);
1158}
1159
// Expands, for each code age in CODE_AGE_LIST, the even- and odd-marking
// "make code young again" builtins; all expansions delegate to
// GenerateMakeCodeYoungAgainCommon above. (No comments inside the macro:
// its lines are backslash-continued.)
1160#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
1161void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
1162 MacroAssembler* masm) { \
1163 GenerateMakeCodeYoungAgainCommon(masm); \
1164} \
1165void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
1166 MacroAssembler* masm) { \
1167 GenerateMakeCodeYoungAgainCommon(masm); \
1168}
1169CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1170#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1171
1172
// Like GenerateMakeCodeYoungAgainCommon but calls the "mark as executed"
// C function, then replays the young-code prologue (PushStandardFrame) and
// resumes execution just past the code-age patch sequence.
1173void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1174 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
1175 // that make_code_young doesn't do any garbage collection which allows us to
1176 // save/restore the registers without worrying about which of them contain
1177 // pointers.
1178
1179 // The following registers must be saved and restored when calling through to
1180 // the runtime:
1181 // r0 - contains return address (beginning of patch sequence)
1182 // r1 - isolate
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001183 // r3 - new target
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001184 FrameScope scope(masm, StackFrame::MANUAL);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001185 __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001186 __ PrepareCallCFunction(2, 0, r2);
1187 __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
1188 __ CallCFunction(ExternalReference::get_mark_code_as_executed_function(
1189 masm->isolate()), 2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001190 __ ldm(ia_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001191
1192 // Perform prologue operations usually performed by the young code stub.
Ben Murdochda12d292016-06-02 14:46:10 +01001193 __ PushStandardFrame(r1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001194
1195 // Jump to point after the code-age stub.
1196 __ add(r0, r0, Operand(kNoCodeAgeSequenceLength));
1197 __ mov(pc, r0);
1198}
1199
1200
1201void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1202 GenerateMakeCodeYoungAgainCommon(masm);
1203}
1204
1205
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001206void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1207 Generate_MarkCodeAsExecutedOnce(masm);
1208}
1209
1210
// Notifies the runtime that a stub failed, preserving all JS caller-saved
// and callee-saved registers across the call, then jumps to the miss handler
// whose address is in lr. |save_doubles| selects whether FP registers are
// saved by the runtime call.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001211static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1212 SaveFPRegsMode save_doubles) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001213 {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001214 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001215
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001216 // Preserve registers across notification, this is important for compiled
1217 // stubs that tail call the runtime on deopts passing their parameters in
1218 // registers.
1219 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved)
1220 // Pass the function and deoptimization type to the runtime system.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001221 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001222 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001223 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001224
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001225 __ add(sp, sp, Operand(kPointerSize)); // Ignore state
1226 __ mov(pc, lr); // Jump to miss handler
1227}
1228
1229
1230void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1231 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1232}
1233
1234
1235void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1236 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001237}
1238
1239
// Notifies the runtime of a (full-codegen) deoptimization of the given
// bailout type, then pops the saved full-codegen state from the stack and
// returns, restoring the top-of-stack register when the state says so.
1240static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1241 Deoptimizer::BailoutType type) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001242 {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001243 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001244 // Pass the function and deoptimization type to the runtime system.
1245 __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
1246 __ push(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001247 __ CallRuntime(Runtime::kNotifyDeoptimized);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001248 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001249
1250 // Get the full codegen state from the stack and untag it -> r6.
1251 __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
1252 __ SmiUntag(r6);
1253 // Switch on the state.
1254 Label with_tos_register, unknown_state;
1255 __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
1256 __ b(ne, &with_tos_register);
1257 __ add(sp, sp, Operand(1 * kPointerSize)); // Remove state.
1258 __ Ret();
1259
1260 __ bind(&with_tos_register);
// TOS_REG state: the accumulator value was saved above the state word.
1261 __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
1262 __ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
1263 __ b(ne, &unknown_state);
1264 __ add(sp, sp, Operand(2 * kPointerSize)); // Remove state.
1265 __ Ret();
1266
1267 __ bind(&unknown_state);
1268 __ stop("no cases left");
1269}
1270
1271
1272void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1273 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1274}
1275
1276
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001277void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1278 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1279}
1280
1281
Ben Murdochb0fe1622011-05-05 13:52:32 +01001282void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1283 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1284}
1285
1286
// Checks that |receiver| is compatible with the signature of
// |function_template_info|, walking the receiver's (hidden) prototype chain
// and, at each step, the chain of parent function templates. Falls through
// when the check passes; branches to |receiver_check_failed| otherwise.
// Clobbers the three scratch registers and ip; may rewrite |receiver| to a
// prototype while walking.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001287static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
1288 Register function_template_info,
1289 Register scratch0, Register scratch1,
1290 Register scratch2,
1291 Label* receiver_check_failed) {
1292 Register signature = scratch0;
1293 Register map = scratch1;
1294 Register constructor = scratch2;
1295
1296 // If there is no signature, return the holder.
1297 __ ldr(signature, FieldMemOperand(function_template_info,
1298 FunctionTemplateInfo::kSignatureOffset));
1299 __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
1300 Label receiver_check_passed;
1301 __ b(eq, &receiver_check_passed);
1302
1303 // Walk the prototype chain.
1304 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1305 Label prototype_loop_start;
1306 __ bind(&prototype_loop_start);
1307
1308 // Get the constructor, if any.
1309 __ GetMapConstructor(constructor, map, ip, ip);
1310 __ cmp(ip, Operand(JS_FUNCTION_TYPE));
1311 Label next_prototype;
1312 __ b(ne, &next_prototype);
1313 Register type = constructor;
1314 __ ldr(type,
1315 FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
1316 __ ldr(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
1317
1318 // Loop through the chain of inheriting function templates.
1319 Label function_template_loop;
1320 __ bind(&function_template_loop);
1321
1322 // If the signatures match, we have a compatible receiver.
1323 __ cmp(signature, type);
1324 __ b(eq, &receiver_check_passed);
1325
1326 // If the current type is not a FunctionTemplateInfo, load the next prototype
1327 // in the chain.
1328 __ JumpIfSmi(type, &next_prototype);
1329 __ CompareObjectType(type, ip, ip, FUNCTION_TEMPLATE_INFO_TYPE);
1330
1331 // Otherwise load the parent function template and iterate.
// Both the load and the branch are predicated on the eq from the compare.
1332 __ ldr(type,
1333 FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset),
1334 eq);
1335 __ b(&function_template_loop, eq);
1336
1337 // Load the next prototype.
1338 __ bind(&next_prototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001339 __ ldr(ip, FieldMemOperand(map, Map::kBitField3Offset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001340 __ tst(ip, Operand(Map::HasHiddenPrototype::kMask));
// Only hidden prototypes participate; a regular prototype fails the check.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001341 __ b(eq, receiver_check_failed);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001342 __ ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
1343 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001344 // Iterate.
1345 __ b(&prototype_loop_start);
1346
1347 __ bind(&receiver_check_passed);
1348}
1349
1350
// Fast path for API calls: validates the receiver against the callee's
// FunctionTemplateInfo signature and, on success, jumps straight to the fast
// C++ handler; on failure drops the arguments and throws IllegalInvocation.
1351void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
1352 // ----------- S t a t e -------------
1353 // -- r0 : number of arguments excluding receiver
1354 // -- r1 : callee
1355 // -- lr : return address
1356 // -- sp[0] : last argument
1357 // -- ...
1358 // -- sp[4 * (argc - 1)] : first argument
1359 // -- sp[4 * argc] : receiver
1360 // -----------------------------------
1361
1362 // Load the FunctionTemplateInfo.
1363 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1364 __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
1365
1366 // Do the compatible receiver check.
1367 Label receiver_check_failed;
1368 __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1369 CompatibleReceiverCheck(masm, r2, r3, r4, r5, r6, &receiver_check_failed);
1370
1371 // Get the callback offset from the FunctionTemplateInfo, and jump to the
1372 // beginning of the code.
1373 __ ldr(r4, FieldMemOperand(r3, FunctionTemplateInfo::kCallCodeOffset));
1374 __ ldr(r4, FieldMemOperand(r4, CallHandlerInfo::kFastHandlerOffset));
1375 __ add(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
1376 __ Jump(r4);
1377
1378 // Compatible receiver check failed: throw an Illegal Invocation exception.
1379 __ bind(&receiver_check_failed);
1380 // Drop the arguments (including the receiver)
1381 __ add(r0, r0, Operand(1));
1382 __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
1383 __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
1384}
1385
1386
// On-stack replacement: asks the runtime to compile the current function for
// OSR and, if optimized code is returned, "returns" into that code at the
// OSR entry point recorded in its deoptimization data; otherwise resumes the
// unoptimized code.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001387void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001388 // Lookup the function in the JavaScript frame.
Steve Block1e0659c2011-05-24 12:43:12 +01001389 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001390 {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001391 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1392 // Pass function as argument.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001393 __ push(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001394 __ CallRuntime(Runtime::kCompileForOnStackReplacement);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001395 }
Steve Block1e0659c2011-05-24 12:43:12 +01001396
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001397 // If the code object is null, just return to the unoptimized code.
Steve Block1e0659c2011-05-24 12:43:12 +01001398 Label skip;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001399 __ cmp(r0, Operand(Smi::FromInt(0)));
Steve Block1e0659c2011-05-24 12:43:12 +01001400 __ b(ne, &skip);
1401 __ Ret();
1402
1403 __ bind(&skip);
Steve Block1e0659c2011-05-24 12:43:12 +01001404
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001405 // Load deoptimization data from the code object.
1406 // <deopt_data> = <code>[#deoptimization_data_offset]
1407 __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));
1408
1409 { ConstantPoolUnavailableScope constant_pool_unavailable(masm);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001410 __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start
1411
1412 if (FLAG_enable_embedded_constant_pool) {
1413 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001414 }
1415
1416 // Load the OSR entrypoint offset from the deoptimization data.
1417 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1418 __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(
1419 DeoptimizationInputData::kOsrPcOffsetIndex)));
1420
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001421 // Compute the target address = code start + osr_offset
1422 __ add(lr, r0, Operand::SmiUntag(r1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001423
1424 // And "return" to the OSR entry point of the function.
1425 __ Ret();
1426 }
1427}
1428
1429
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001430// static
// Implements the Date.prototype getters: loads |field_index| from the JSDate
// receiver, using the per-isolate date-cache stamp to decide whether the
// cached field is still valid; falls back to the C++ date-field function on
// a stamp mismatch, and throws if the receiver is not a JSDate.
1431void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
1432 int field_index) {
1433 // ----------- S t a t e -------------
1434 // -- lr : return address
1435 // -- sp[0] : receiver
1436 // -----------------------------------
1437
1438 // 1. Pop receiver into r0 and check that it's actually a JSDate object.
1439 Label receiver_not_date;
1440 {
1441 __ Pop(r0);
1442 __ JumpIfSmi(r0, &receiver_not_date);
1443 __ CompareObjectType(r0, r1, r2, JS_DATE_TYPE);
1444 __ b(ne, &receiver_not_date);
1445 }
1446
1447 // 2. Load the specified date field, falling back to the runtime as necessary.
1448 if (field_index == JSDate::kDateValue) {
// The primitive time value needs no cache check.
1449 __ ldr(r0, FieldMemOperand(r0, JSDate::kValueOffset));
1450 } else {
1451 if (field_index < JSDate::kFirstUncachedField) {
1452 Label stamp_mismatch;
1453 __ mov(r1, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
1454 __ ldr(r1, MemOperand(r1));
1455 __ ldr(ip, FieldMemOperand(r0, JSDate::kCacheStampOffset));
1456 __ cmp(r1, ip);
1457 __ b(ne, &stamp_mismatch);
1458 __ ldr(r0, FieldMemOperand(
1459 r0, JSDate::kValueOffset + field_index * kPointerSize));
1460 __ Ret();
1461 __ bind(&stamp_mismatch);
1462 }
1463 FrameScope scope(masm, StackFrame::INTERNAL);
1464 __ PrepareCallCFunction(2, r1);
1465 __ mov(r1, Operand(Smi::FromInt(field_index)));
1466 __ CallCFunction(
1467 ExternalReference::get_date_field_function(masm->isolate()), 2);
1468 }
1469 __ Ret();
1470
1471 // 3. Raise a TypeError if the receiver is not a date.
1472 __ bind(&receiver_not_date);
1473 __ TailCallRuntime(Runtime::kThrowNotDateError);
1474}
1475
// static
//
// Builtin for Function.prototype[Symbol.hasInstance]: delegates the
// instanceof check to InstanceOfStub.
void Builtins::Generate_FunctionHasInstance(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : argc
  //  -- sp[0] : first argument (left-hand side)
  //  -- sp[4] : receiver (right-hand side)
  // -----------------------------------

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // The arguments are addressed relative to fp here because the internal
    // frame entered above sits between sp and the incoming arguments.
    __ ldr(InstanceOfDescriptor::LeftRegister(),
           MemOperand(fp, 2 * kPointerSize));  // Load left-hand side.
    __ ldr(InstanceOfDescriptor::RightRegister(),
           MemOperand(fp, 3 * kPointerSize));  // Load right-hand side.
    // NOTE(review): the `true` stub argument presumably selects the
    // Symbol.hasInstance flavor of InstanceOfStub — confirm against the
    // stub's constructor.
    InstanceOfStub stub(masm->isolate(), true);
    __ CallStub(&stub);
  }

  // Pop the argument and the receiver.
  __ Ret(2);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001497
// static
//
// Builtin for Function.prototype.apply (ES6 19.2.3.1).
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into r1, argArray into r0 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    // r2/r3 default to undefined; the conditional (ge) loads below only
    // overwrite them when the corresponding argument was actually passed,
    // so missing arguments fall back to undefined as the spec requires.
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ mov(r3, r2);
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));  // receiver
    __ sub(r4, r0, Operand(1), SetCC);
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // thisArg
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argArray
    // Drop all arguments and replace the receiver slot with thisArg.
    __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(sp, 0));
    __ mov(r0, r3);
  }

  // ----------- S t a t e -------------
  //  -- r0    : argArray
  //  -- r1    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(r1, &receiver_not_callable);
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsCallable));
  __ b(eq, &receiver_not_callable);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(r0, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(r0, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ mov(r0, Operand(0));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    // Store the receiver where the runtime call expects its argument.
    __ str(r1, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1562
1563
// static
//
// Builtin for Function.prototype.call (ES6 19.2.3.3): the receiver is the
// callable, the first argument becomes the new receiver, remaining
// arguments are shifted down one slot.
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  {
    Label done;
    __ cmp(r0, Operand::Zero());
    __ b(ne, &done);
    // No arguments: push undefined to act as the (missing) thisArg.
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  // r0: actual number of arguments
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));

  // 3. Shift arguments and return address one slot down on the stack
  // (overwriting the original receiver). Adjust argument count to make
  // the original first argument the new receiver.
  // r0: actual number of arguments
  // r1: callable
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));

    // Copy each slot from one below into its successor, walking down
    // towards sp; this moves every argument up by one stack slot.
    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, -kPointerSize));
    __ str(ip, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
    __ cmp(r2, sp);
    __ b(ne, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
1606
1607
// Builtin for Reflect.apply (ES6 26.1.1).
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r1 (if present), argumentsList into r0 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    // Default all three to undefined; the conditional (ge) loads only fire
    // for arguments that were actually passed.
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ mov(r2, r1);
    __ mov(r3, r1);
    __ sub(r4, r0, Operand(1), SetCC);
    __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // target
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // thisArgument
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argumentsList
    // Drop all arguments and replace the receiver slot with thisArgument.
    __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(sp, 0));
    __ mov(r0, r3);
  }

  // ----------- S t a t e -------------
  //  -- r0    : argumentsList
  //  -- r1    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(r1, &target_not_callable);
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsCallable));
  __ b(eq, &target_not_callable);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    // Store the target where the runtime call expects its argument.
    __ str(r1, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1661
1662
// Builtin for Reflect.construct (ES6 26.1.2).
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r1 (if present), argumentsList into r0 (if present),
  // new.target into r3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    // Defaults: target/argumentsList undefined; receiver slot is overwritten
    // with undefined up front since [[Construct]] ignores the receiver.
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ mov(r2, r1);
    __ str(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));  // receiver
    __ sub(r4, r0, Operand(1), SetCC);
    __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // target
    __ mov(r3, r1);  // new.target defaults to target
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argumentsList
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // new.target
    __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ mov(r0, r2);
  }

  // ----------- S t a t e -------------
  //  -- r0    : argumentsList
  //  -- r3    : new.target
  //  -- r1    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(r1, &target_not_constructor);
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsConstructor));
  __ b(eq, &target_not_constructor);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(r3, &new_target_not_constructor);
  __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsConstructor));
  __ b(eq, &new_target_not_constructor);

  // 4a. Construct the target with the given new.target and argumentsList.
  // The Apply builtin dispatches to Construct when new.target is not
  // undefined.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ str(r1, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ str(r3, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
1731
1732
// Jumps to |stack_overflow| if pushing |r2| (expected argument count) more
// slots would run past the real stack limit. Clobbers r5.
static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  //  -- r3 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  __ cmp(r5, Operand(r2, LSL, kPointerSizeLog2));
  __ b(le, stack_overflow);  // Signed comparison.
}
1752
1753
// Builds an arguments adaptor frame: pushes lr, fp, (optionally pp), the
// ARGUMENTS_ADAPTOR frame-type marker, the function and the smi-tagged
// argument count, then points fp at the fixed part of the new frame.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  // r0 (argc) is saved smi-tagged so it can be read back as a tagged value.
  __ SmiTag(r0);
  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
                       (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                       fp.bit() | lr.bit());
  __ add(fp, sp,
         Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}
1763
1764
// Tears down the arguments adaptor frame built by EnterArgumentsAdaptorFrame
// and drops the pushed arguments plus the receiver.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                              kPointerSize)));

  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR);
  // r1 holds the smi-tagged argument count saved on frame entry.
  __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
  __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
}
1778
1779
// static
//
// Shared tail of apply-like builtins: materializes argumentsList as a flat
// argument vector on the stack, then dispatches to Call or Construct
// depending on new.target.
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : argumentsList
  //  -- r1    : target
  //  -- r3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(r0, &create_runtime);

    // Load the map of argumentsList into r2.
    __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));

    // Load native context into r4.
    __ ldr(r4, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    // A map identical to the context's arguments map implies the object
    // still has its original (unmodified) shape.
    __ ldr(ip, ContextMemOperand(r4, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r2);
    __ b(eq, &create_arguments);
    __ ldr(ip, ContextMemOperand(r4, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r2);
    __ b(eq, &create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(r2, ip, JS_ARRAY_TYPE);
    __ b(eq, &create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    // Generic fallback for proxies, slow-mode arrays, and other iterables.
    __ bind(&create_runtime);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r1, r3, r0);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(r1, r3);
      __ ldr(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));
      __ SmiUntag(r2);
    }
    __ jmp(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ ldr(r2, FieldMemOperand(r0, JSArgumentsObject::kLengthOffset));
    __ ldr(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
    __ ldr(ip, FieldMemOperand(r4, FixedArray::kLengthOffset));
    // If the length property disagrees with the backing store, the object
    // was modified — fall back to the runtime.
    __ cmp(r2, ip);
    __ b(ne, &create_runtime);
    __ SmiUntag(r2);
    __ mov(r0, r4);
    __ b(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ ldr(r2, FieldMemOperand(r2, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(r2);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    // Only FAST_SMI_ELEMENTS and FAST_ELEMENTS (non-holey) are handled
    // inline; holey kinds would require hole-to-undefined conversion.
    __ cmp(r2, Operand(FAST_ELEMENTS));
    __ b(hi, &create_runtime);
    __ cmp(r2, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ b(eq, &create_runtime);
    __ ldr(r2, FieldMemOperand(r0, JSArray::kLengthOffset));
    __ ldr(r0, FieldMemOperand(r0, JSArray::kElementsOffset));
    __ SmiUntag(r2);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
    // Make ip the space we have left. The stack might already be overflowed
    // here which will cause ip to become negative.
    __ sub(ip, sp, ip);
    // Check if the arguments will overflow the stack.
    __ cmp(ip, Operand(r2, LSL, kPointerSizeLog2));
    __ b(gt, &done);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- r1    : target
  //  -- r0    : args (a FixedArray built from argumentsList)
  //  -- r2    : len (number of elements to push from args)
  //  -- r3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    __ mov(r4, Operand(0));
    Label done, loop;
    __ bind(&loop);
    __ cmp(r4, r2);
    __ b(eq, &done);
    __ add(ip, r0, Operand(r4, LSL, kPointerSizeLog2));
    __ ldr(ip, FieldMemOperand(ip, FixedArray::kHeaderSize));
    __ Push(ip);
    __ add(r4, r4, Operand(1));
    __ b(&loop);
    __ bind(&done);
    // r0 now holds the argument count expected by Call/Construct.
    __ Move(r0, r4);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
1900
namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
// |args_reg| holds the callee argument count; scratch1-3 must not alias it.
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  // This flag is read at run time (not code-generation time) so already
  // generated code reacts to the current setting.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
  __ ldrb(scratch1, MemOperand(scratch1));
  __ cmp(scratch1, Operand(0));
  __ b(eq, &done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ ldr(scratch3,
           MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ cmp(scratch3, Operand(Smi::FromInt(StackFrame::STUB)));
    __ b(ne, &no_interpreter_frame);
    __ ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(scratch3,
         MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ cmp(scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(ne, &no_arguments_adaptor);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mov(fp, scratch2);
  __ ldr(caller_args_count_reg,
         MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ b(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count
  // (no adaptor frame means actual == formal argument count, so read the
  // count from the caller function's SharedFunctionInfo).
  __ ldr(scratch1,
         MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  __ ldr(scratch1,
         FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(caller_args_count_reg,
         FieldMemOperand(scratch1,
                         SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(caller_args_count_reg);

  __ bind(&formal_parameter_count_loaded);

  // Delegate the actual frame/argument shuffling to the macro assembler.
  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001992
// static
//
// [[Call]] for ordinary JSFunctions (ES6 9.2.1): converts the receiver for
// sloppy-mode non-native functions, optionally prepares a tail call, and
// invokes the function with argument-count adaptation.
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(r1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kFunctionKindByteOffset));
  __ tst(r3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ b(ne, &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  // Native and strict bits share a byte (asserted above), so one load plus
  // one tst tests both at once.
  __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kNativeByteOffset));
  __ tst(r3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                     (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ b(ne, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- r0 : the number of arguments (not including the receiver)
    //  -- r1 : the function to call (checked to be a JSFunction)
    //  -- r2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r3);
    } else {
      Label convert_to_object, convert_receiver;
      __ ldr(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
      __ JumpIfSmi(r3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      // JSReceivers need no conversion.
      __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
      __ b(hs, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(r3, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(r3);
        }
        __ b(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        // r0/r1 are caller-relevant, so save them around the stub call
        // (r0 smi-tagged so the GC can treat it as a tagged value).
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(r0);
        __ Push(r0, r1);
        __ mov(r0, r3);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ mov(r3, r0);
        __ Pop(r0, r1);
        __ SmiUntag(r0);
      }
      // Reload the shared function info; r2 was clobbered above.
      __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    // Write the converted receiver back into its stack slot.
    __ str(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSFunction)
  //  -- r2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r0, r3, r4, r5);
  }

  __ ldr(r2,
         FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(r2);
  ParameterCount actual(r0);
  ParameterCount expected(r2);
  // InvokeFunctionCode handles the actual-vs-expected argument adaptation.
  __ InvokeFunctionCode(r1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ push(r1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
2102
2103
namespace {

// Pushes the [[BoundArguments]] of the JSBoundFunction in r1 onto the stack
// below the existing arguments, updating r0 (argument count) accordingly.
// Used by both the call and construct paths for bound functions.
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : target (checked to be a JSBoundFunction)
  //  -- r3 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into r2 and length of that into r4.
  Label no_bound_arguments;
  __ ldr(r2, FieldMemOperand(r1, JSBoundFunction::kBoundArgumentsOffset));
  __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
  __ SmiUntag(r4);
  __ cmp(r4, Operand(0));
  __ b(eq, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- r0 : the number of arguments (not including the receiver)
    //  -- r1 : target (checked to be a JSBoundFunction)
    //  -- r2 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- r3 : new.target (only in case of [[Construct]])
    //  -- r4 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ sub(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
      __ b(gt, &done);  // Signed comparison.
      // Restore the stack pointer.
      __ add(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Relocate arguments down the stack.
    // Moves the existing receiver+arguments (r0 + 1 slots, hence the `gt`
    // exit when r5 exceeds r0) into the space just reserved; r4 indexes the
    // old location of each slot.
    {
      Label loop, done_loop;
      __ mov(r5, Operand(0));
      __ bind(&loop);
      __ cmp(r5, r0);
      __ b(gt, &done_loop);
      __ ldr(ip, MemOperand(sp, r4, LSL, kPointerSizeLog2));
      __ str(ip, MemOperand(sp, r5, LSL, kPointerSizeLog2));
      __ add(r4, r4, Operand(1));
      __ add(r5, r5, Operand(1));
      __ b(&loop);
      __ bind(&done_loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    // Walks the FixedArray from the last element down, filling the slots
    // freed by the relocation above, and bumps r0 per pushed argument.
    {
      Label loop;
      __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
      __ SmiUntag(r4);
      __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ bind(&loop);
      __ sub(r4, r4, Operand(1), SetCC);
      __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2));
      __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));
      __ add(r0, r0, Operand(1));
      __ b(gt, &loop);
    }
  }
  __ bind(&no_bound_arguments);
}

}  // namespace
2181
2182
// static
//
// [[Call]] for JSBoundFunctions (ES6 9.4.1.1): installs [[BoundThis]] as the
// receiver, pushes [[BoundArguments]], then tail-calls the Call builtin on
// the [[BoundTargetFunction]].
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(r1);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r0, r3, r4, r5);
  }

  // Patch the receiver to [[BoundThis]].
  __ ldr(ip, FieldMemOperand(r1, JSBoundFunction::kBoundThisOffset));
  __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  // The builtin is loaded indirectly through its external reference (rather
  // than embedded directly) and entered at its instruction start.
  __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                       masm->isolate())));
  __ ldr(ip, MemOperand(ip));
  __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
}
2210
2211
2212// static
// Generates the generic Call builtin: dispatches on the target's instance
// type to the JSFunction/JSBoundFunction fast paths, falls back to the
// runtime for proxies and the call-as-function delegate for other callables,
// and throws for non-callable targets.
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  // Smis are never callable.
  __ JumpIfSmi(r1, &non_callable);
  __ bind(&non_smi);
  // Load the target's map into r4 and instance type into r5, comparing the
  // type against JS_FUNCTION_TYPE; the conditional jumps below reuse the
  // flags / r5 from this sequence.
  __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method (map bit field).
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsCallable));
  __ b(eq, &non_callable);

  // Callable, but not a plain or bound function: proxies go to the runtime,
  // everything else through the delegate below.
  __ cmp(r5, Operand(JS_PROXY_TYPE));
  __ b(ne, &non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r0, r3, r4, r5);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(r1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ add(r0, r0, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable: throw a TypeError.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
2271
2272
2273// static
// Generates the Construct builtin for a JSFunction target: tail-calls the
// function-specific construct stub found on the SharedFunctionInfo.
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (checked to be a JSFunction)
  //  -- r3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(r1);

  // Calling convention for function specific ConstructStubs require
  // r2 to contain either an AllocationSite or undefined.
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point). Jump past the Code object header to the first
  // instruction of the stub.
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset));
  __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
}
2292
2293
2294// static
// Generates the Construct builtin for a JSBoundFunction target: pushes the
// bound arguments, fixes up new.target, and tail-jumps to the generic
// Construct builtin with the bound target function.
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSBoundFunction)
  //  -- r3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(r1);

  // Push the [[BoundArguments]] onto the stack (also bumps r0).
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  // The ldr is predicated on "eq" from the cmp, so r3 is only overwritten
  // when r1 == r3.
  __ cmp(r1, r3);
  __ ldr(r3, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset),
         eq);

  // Construct the [[BoundTargetFunction]] via the Construct builtin: load
  // the builtin Code object through its ExternalReference cell and jump
  // past the Code header.
  __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ ldr(ip, MemOperand(ip));
  __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
}
2317
2318
2319// static
// Generates the Construct builtin for a JSProxy target: defers entirely to
// the runtime implementation of Proxy [[Construct]].
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (checked to be a JSProxy)
  //  -- r3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(r1);
  __ Push(r3);
  // Include the pushed new_target, constructor and the receiver.
  __ add(r0, r0, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
2337
2338
2339// static
// Generates the generic Construct builtin: dispatches on the target's
// instance type to the JSFunction/JSBoundFunction/JSProxy construct paths,
// falls back to the call-as-constructor delegate for other objects with a
// [[Construct]] internal method, and throws for non-constructables.
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (can be any Object)
  //  -- r3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi (Smis are never constructors).
  Label non_constructor;
  __ JumpIfSmi(r1, &non_constructor);

  // Dispatch based on instance type (map in r4, instance type in r5; the
  // later cmp instructions reuse r5).
  __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method (map bit field).
  __ ldrb(r2, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r2, Operand(1 << Map::kIsConstructor));
  __ b(eq, &non_constructor);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ cmp(r5, Operand(JS_PROXY_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target; the
    // receiver slot sits r0 words above sp.
    __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
2389
2390
// Generates the arguments adaptor trampoline: bridges a call whose actual
// argument count (r0) differs from the callee's expected count (r2) by
// building an adaptor frame and copying/padding arguments, then calling the
// callee's code entry. If the callee doesn't adapt arguments (sentinel in
// r2), jumps straight to the callee.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  //  -- r3 : new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ cmp(r0, r2);
  __ b(lt, &too_few);
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);

  { // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r0 and copy end address into r4.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
    // adjust for return address and receiver
    __ add(r0, r0, Operand(2 * kPointerSize));
    // Copy end address: only the expected number of arguments is copied.
    __ sub(r4, r0, Operand(r2, LSL, kPointerSizeLog2));

    // Copy the arguments (including the receiver) to the new stack frame,
    // walking downwards from the highest argument slot.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    // r4: copy end address

    Label copy;
    __ bind(&copy);
    __ ldr(ip, MemOperand(r0, 0));
    __ push(ip);
    __ cmp(r0, r4);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    __ b(&invoke);
  }

  { // Too few parameters: Actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r0 and copy end address is fp.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));

    // Copy the arguments (including the receiver) to the new stack frame;
    // copying stops when r0 reaches fp, i.e. all actual arguments copied.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
    __ push(ip);
    __ cmp(r0, fp);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    // Fill the remaining expected arguments with undefined.
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    // r4 = lowest slot to fill: expected-count words below fp ...
    __ sub(r4, fp, Operand(r2, LSL, kPointerSizeLog2));
    // Adjust for frame.
    __ sub(r4, r4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                           2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(ip);
    __ cmp(sp, r4);
    __ b(ne, &fill);
  }

  // Call the entry point. The callee sees the expected argument count in r0.
  __ bind(&invoke);
  __ mov(r0, r2);
  // r0 : expected number of arguments
  // r1 : function (passed through to callee)
  // r3 : new target (passed through to callee)
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ Call(r4);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);


  // -------------------------------------------
  // Dont adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ Jump(r4);

  // Stack overflow while building the adaptor frame: throw and trap
  // (bkpt is unreachable; the runtime call does not return).
  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}
2514
2515
2516#undef __
2517
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002518} // namespace internal
2519} // namespace v8
Leon Clarkef7060e22010-06-03 12:02:55 +01002520
2521#endif // V8_TARGET_ARCH_ARM