// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : target
  //  -- r3                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(r1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Insert extra arguments.
  int num_extra_args = 0;
  switch (extra_args) {
    case BuiltinExtraArguments::kTarget:
      __ Push(r1);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kNewTarget:
      __ Push(r3);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kTargetAndNewTarget:
      __ Push(r1, r3);
      num_extra_args += 2;
      break;
    case BuiltinExtraArguments::kNone:
      break;
  }

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(r0, r0, Operand(num_extra_args + 1));

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
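
// Example: for a builtin declared with
// BuiltinExtraArguments::kTargetAndNewTarget and called with two JS
// arguments, r0 ends up as 2 (arguments) + 2 (extras) + 1 (receiver) = 5
// when control reaches the C++ builtin.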
65
66
Ben Murdoch3ef787d2012-04-12 10:51:47 +010067// Load the built-in InternalArray function from the current context.
68static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
69 Register result) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000070 // Load the InternalArray function from the current native context.
71 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
Ben Murdoch3ef787d2012-04-12 10:51:47 +010072}
73
74
Steve Blocka7e24c12009-10-30 11:49:00 +000075// Load the built-in Array function from the current context.
76static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000077 // Load the Array function from the current native context.
78 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
Steve Blocka7e24c12009-10-30 11:49:00 +000079}
80
81
Ben Murdoch3ef787d2012-04-12 10:51:47 +010082void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
83 // ----------- S t a t e -------------
84 // -- r0 : number of arguments
85 // -- lr : return address
86 // -- sp[...]: constructor arguments
87 // -----------------------------------
88 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
89
90 // Get the InternalArray function.
91 GenerateLoadInternalArrayFunction(masm, r1);
92
93 if (FLAG_debug_code) {
94 // Initial map for the builtin InternalArray functions should be maps.
95 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +000096 __ SmiTst(r2);
97 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
Ben Murdoch3ef787d2012-04-12 10:51:47 +010098 __ CompareObjectType(r2, r3, r4, MAP_TYPE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000099 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100100 }
101
102 // Run the native code for the InternalArray function called as a normal
103 // function.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000104 // tail call a stub
105 InternalArrayConstructorStub stub(masm->isolate());
106 __ TailCallStub(&stub);
Steve Blocka7e24c12009-10-30 11:49:00 +0000107}
108
109
110void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
111 // ----------- S t a t e -------------
112 // -- r0 : number of arguments
113 // -- lr : return address
114 // -- sp[...]: constructor arguments
115 // -----------------------------------
116 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
117
118 // Get the Array function.
119 GenerateLoadArrayFunction(masm, r1);
120
121 if (FLAG_debug_code) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100122 // Initial map for the builtin Array functions should be maps.
Steve Blocka7e24c12009-10-30 11:49:00 +0000123 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000124 __ SmiTst(r2);
125 __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
Steve Blocka7e24c12009-10-30 11:49:00 +0000126 __ CompareObjectType(r2, r3, r4, MAP_TYPE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000127 __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
Steve Blocka7e24c12009-10-30 11:49:00 +0000128 }
129
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000130 __ mov(r3, r1);
Steve Blocka7e24c12009-10-30 11:49:00 +0000131 // Run the native code for the Array function called as a normal function.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000132 // tail call a stub
133 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
134 ArrayConstructorStub stub(masm->isolate());
135 __ TailCallStub(&stub);
Steve Blocka7e24c12009-10-30 11:49:00 +0000136}


// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Condition const cc_done = (kind == MathMaxMinKind::kMin) ? mi : gt;
  Condition const cc_swap = (kind == MathMaxMinKind::kMin) ? gt : mi;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r1 and the double value in d1.
  __ LoadRoot(r1, root_index);
  __ vldr(d1, FieldMemOperand(r1, HeapNumber::kValueOffset));

  // Remember how many slots to drop (including the receiver).
  __ add(r4, r0, Operand(1));

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lt, &done_loop);

    // Load the next parameter tagged value into r2.
    __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));

    // Load the double value of the parameter into d2, converting the
    // parameter to a number first using the ToNumberStub if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r2, &convert_smi);
    __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
    __ JumpIfRoot(r3, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumberStub to convert it.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r0);
      __ SmiTag(r4);
      __ Push(r0, r1, r4);
      __ mov(r0, r2);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(r2, r0);
      __ Pop(r0, r1, r4);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
        __ SmiToDouble(d1, r1);
        __ JumpIfSmi(r1, &done_restore);
        __ vldr(d1, FieldMemOperand(r1, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
      __ SmiUntag(r4);
      __ SmiUntag(r0);
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ vldr(d2, FieldMemOperand(r2, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r2);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left
    // hand side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ VFPCompareAndSetFlags(d1, d2);
    __ b(cc_done, &loop);
    __ b(cc_swap, &compare_swap);
    __ b(vs, &compare_nan);

    // Left and right hand side are equal, check for -0 vs. +0.
    __ VmovHigh(ip, reg);
    __ cmp(ip, Operand(0x80000000));
    __ b(ne, &loop);

    // Result is on the right hand side.
    __ bind(&compare_swap);
    __ vmov(d1, d2);
    __ mov(r1, r2);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(r1, Heap::kNanValueRootIndex);
    __ vldr(d1, FieldMemOperand(r1, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  __ mov(r0, r1);
  __ Drop(r4);
  __ Ret();
}
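
// The three exit paths above mirror the spec for Math.max/Math.min: any NaN
// argument makes the result NaN, and -0 is ordered below +0, so e.g.
// Math.max(-0, +0) is +0 while Math.min(-0, +0) is -0; the sign-bit compare
// against 0x80000000 is what distinguishes the two zeros.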

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
    __ Drop(2);
  }

  // 2a. Convert the first argument to a number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(r0, Smi::FromInt(0));
  __ Ret(1);
}
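
// This is the [[Call]] path of the Number constructor: Number() yields +0
// and Number(value) yields ToNumber(value), e.g. Number("0x2A") == 42.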


// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- r3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // 2. Load the first argument into r2 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ Move(r2, Smi::FromInt(0));
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r2 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r2, &done_convert);
    __ CompareObjectType(r2, r4, r4, HEAP_NUMBER_TYPE);
    __ b(eq, &done_convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r1, r3);
      __ Move(r0, r2);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Move(r2, r0);
      __ Pop(r1, r3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(r1, r3);
  __ b(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r0, r1, r2, r4, r5, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create the new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r2);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r2);
  }
  __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));
  __ Ret();
}
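
// This is the [[Construct]] path: `new Number(value)` yields a JSValue
// wrapper object rather than a primitive (typeof new Number(1) == "object"),
// with the primitive stored in the wrapper's value field above.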


// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
    __ Drop(2);
  }

  // 2a. At least one argument, return r0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r0, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE);
    __ b(hi, &to_string);
    __ b(eq, &symbol_descriptive_string);
    __ Ret();
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r0, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r0 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in r0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(r0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}
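
// [[Call]] path of the String constructor: String() yields "" and, unlike
// plain ToString, a symbol argument is accepted and produces its descriptive
// string, e.g. String(Symbol("foo")) == "Symbol(foo)".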


// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- r3                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // 2. Load the first argument into r2 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r2, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r2 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r2, &convert);
    __ CompareObjectType(r2, r4, r4, FIRST_NONSTRING_TYPE);
    __ b(lo, &done_convert);
    __ bind(&convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(r1, r3);
      __ Move(r0, r2);
      __ CallStub(&stub);
      __ Move(r2, r0);
      __ Pop(r1, r3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(r1, r3);
  __ b(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r0, r1, r2, r4, r5, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create the new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r2);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r2);
  }
  __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));
  __ Ret();
}
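
// [[Construct]] path: `new String(value)` wraps the converted string in a
// JSValue; the new-target check in step 4 sends subclass instantiations
// (where new.target differs from the String function itself) to the runtime
// fallback.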


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r0 : argument count (preserved for callee)
  //  -- r1 : target function (preserved for callee)
  //  -- r3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ SmiTag(r0);
    __ push(r0);
    // Push a copy of the target function and the new target.
    __ push(r1);
    __ push(r3);
    // Push function as parameter to the runtime call.
    __ Push(r1);

    __ CallRuntime(function_id, 1);
    __ mov(r2, r0);

    // Restore target function and new target.
    __ pop(r3);
    __ pop(r1);
    __ pop(r0);
    __ SmiUntag(r0, r0);
  }
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}
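
// Both helpers above use the same entry computation: a Code object is a
// tagged heap pointer, so the first instruction lives at
// code + Code::kHeaderSize - kHeapObjectTag.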


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // the stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- r2     : allocation site or undefined
  //  -- r3     : new target
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r2, r4);
    __ SmiTag(r0);
    __ Push(r2, r0);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(r1, r3);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(r4, r0);
      __ Pop(r1, r3);

      // ----------- S t a t e -------------
      //  -- r1: constructor function
      //  -- r3: new target
      //  -- r4: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ ldr(r0, MemOperand(sp));
    }

    __ SmiUntag(r0);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ push(r4);
      __ push(r4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r0: number of arguments
    // r1: constructor function
    // r2: address of last argument (caller sp)
    // r3: new target
    // r4: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ SmiTag(r4, r0);
    __ b(&entry);
    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2 - 1));
    __ push(ip);
    __ bind(&entry);
    __ sub(r4, r4, Operand(2), SetCC);
    __ b(ge, &loop);

    // Call the function.
    // r0: number of arguments
    // r1: constructor function
    // r3: new target
    if (is_api_function) {
      __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, r3, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r0: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r0: result
      // sp[0]: receiver
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(r0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r0, r1, r3, FIRST_JS_RECEIVER_TYPE);
      __ b(ge, &exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ ldr(r0, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ ldr(r1, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  }
  __ Jump(lr);
}
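
// Construct-result semantics implemented above: if the constructor returns
// an object, `new` yields that object; a primitive return value is ignored
// in favor of the implicit receiver. E.g.
//   function F() { return 42; }  // new F() yields the allocated receiver
//   function G() { return {}; }  // new G() yields the returned object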


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}


void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ push(r1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}


enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers r2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
  // Make r2 the space we have left. The stack might already be overflowed
  // here which will cause r2 to become negative.
  __ sub(r2, sp, r2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ cmp(r2, Operand::PointerOffsetFromSmiKey(argc));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ cmp(r2, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ b(gt, &okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
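
// The check computes remaining = sp - real_stack_limit and compares it,
// signed (so an already-overflowed, negative value also fails), against
// argc * kPointerSize, the space the arguments about to be pushed will need.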


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: new.target
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r6, r8 (if !FLAG_enable_embedded_constant_pool) and cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ mov(cp, Operand::Zero());

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ ldr(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r1, r2);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r2.
    Generate_CheckStackOverflow(masm, r3, kArgcIsUntaggedInt);

    // Remember new.target.
    __ mov(r5, r0);

    // Copy arguments to the stack in a loop.
    // r1: function
    // r3: argc
    // r4: argv, i.e. points to first arg
    Label loop, entry;
    __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
    // r2 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
    __ ldr(r0, MemOperand(r0));  // dereference handle
    __ push(r0);  // push parameter
    __ bind(&entry);
    __ cmp(r4, r2);
    __ b(ne, &loop);

    // Setup new.target and argc.
    __ mov(r0, Operand(r3));
    __ mov(r3, Operand(r5));

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mov(r5, Operand(r4));
    __ mov(r6, Operand(r4));
    if (!FLAG_enable_embedded_constant_pool) {
      __ mov(r8, Operand(r4));
    }
    if (kR9Available == 1) {
      __ mov(r9, Operand(r4));
    }

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
    // Respect ABI stack constraint.
  }
  __ Jump(lr);

  // r0: result
}
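
// Note the double load in the copy loop above: each argv slot holds a
// handle, so the code first fetches the handle from argv and then
// dereferences it to the actual tagged object before pushing.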


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r1: the JS function object being called.
//   o r3: the new target
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushFixedFrame(r1);
  __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Get the bytecode array from the function object and load the pointer to
  // the first entry into kInterpreterBytecodeArrayRegister.
  __ ldr(r0, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  Register debug_info = kInterpreterBytecodeArrayRegister;
  DCHECK(!debug_info.is(r0));
  __ ldr(debug_info, FieldMemOperand(r0, SharedFunctionInfo::kDebugInfoOffset));
  __ cmp(debug_info, Operand(DebugInfo::uninitialized()));
  // Load original bytecode array or the debug copy.
  __ ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(r0, SharedFunctionInfo::kFunctionDataOffset), eq);
  __ ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex), ne);

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r0, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Push new.target, bytecode array and zero for bytecode array offset.
  __ mov(r0, Operand(0));
  __ Push(r3, kInterpreterBytecodeArrayRegister, r0);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ ldr(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r9, sp, Operand(r4));
    __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
    __ cmp(r9, Operand(r2));
    __ b(hs, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
    __ b(&loop_check, al);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(r9);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ sub(r4, r4, Operand(kPointerSize), SetCC);
    __ b(&loop_header, ge);
  }

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Code aging of the BytecodeArray object.

  // Load accumulator, register file, bytecode offset, dispatch table into
  // registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ add(kInterpreterRegisterFileRegister, fp,
         Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r1, LSL,
                        kPointerSizeLog2));
  // TODO(rmcilroy): Make dispatch table point to code entries to avoid
  // untagging and header removal.
  __ add(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(ip);

  // Even though the first bytecode handler was called, we will never return.
  __ Abort(kUnexpectedReturnFromBytecodeHandler);
}
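
// Dispatch works by using the opcode byte at
// bytecode_array[bytecode_offset] as an index into the dispatch table of
// Code objects, then entering the handler past its Code header; handlers
// dispatch onward themselves, so control coming back here indicates a bug.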


void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in the accumulator, which is already in r0.

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments and return.
  __ ldr(ip, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                             BytecodeArray::kParameterSizeOffset));
  __ add(sp, sp, ip, LeaveCC);
  __ Jump(lr);
}


static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
                                         Register limit, Register scratch) {
  Label loop_header, loop_check;
  __ b(al, &loop_check);
  __ bind(&loop_header);
  __ ldr(scratch, MemOperand(index, -kPointerSize, PostIndex));
  __ push(scratch);
  __ bind(&loop_check);
  __ cmp(index, limit);
  __ b(gt, &loop_header);
}
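
// The loop walks from `index` (highest address) down to `limit` (exclusive),
// pushing one kPointerSize slot per iteration; the callers below compute
// limit as index minus the number of values to push times kPointerSize.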


// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r1 : the target to call (can be any Object).
  // -----------------------------------

  // Find the address of the last argument.
  __ add(r3, r0, Operand(1));  // Add one for receiver.
  __ mov(r3, Operand(r3, LSL, kPointerSizeLog2));
  __ sub(r3, r2, r3);

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r2, r3, r4);

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}


// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : argument count (not including receiver)
  //  -- r3 : new target
  //  -- r1 : constructor to call
  //  -- r2 : address of the first argument
  // -----------------------------------

  // Find the address of the last argument.
  __ mov(r4, Operand(r0, LSL, kPointerSizeLog2));
  __ sub(r4, r2, r4);

  // Push a slot for the receiver to be constructed.
  __ mov(ip, Operand::Zero());
  __ push(ip);

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r2, r4, r5);

  // Call the constructor with r0, r1, and r3 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}


static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) {
  // Initialize register file register and dispatch table register.
  __ add(kInterpreterRegisterFileRegister, fp,
         Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the context from the frame.
  __ ldr(kContextRegister,
         MemOperand(kInterpreterRegisterFileRegister,
                    InterpreterFrameConstants::kContextFromRegisterPointer));

  // Get the bytecode array pointer from the frame.
  __ ldr(
      kInterpreterBytecodeArrayRegister,
      MemOperand(kInterpreterRegisterFileRegister,
                 InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r1, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ ldr(kInterpreterBytecodeOffsetRegister,
         MemOperand(
             kInterpreterRegisterFileRegister,
             InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r1, LSL,
                        kPointerSizeLog2));
  __ add(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ mov(pc, ip);
}


static void Generate_InterpreterNotifyDeoptimizedHelper(
    MacroAssembler* masm, Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ mov(r1, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(r1);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // Drop state (we don't use these for interpreter deopts) and pop the
  // accumulator value into the accumulator register.
  __ Drop(1);
  __ Pop(kInterpreterAccumulatorRegister);

  // Enter the bytecode dispatch.
  Generate_EnterBytecodeDispatch(masm);
}


void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the address of the interpreter entry trampoline as a return address.
  // This simulates the initial call to bytecode handlers in interpreter entry
  // trampoline. The return will never actually be taken, but our stack walker
  // uses this address to determine whether a frame is interpreted.
  __ Move(lr, masm->isolate()->builtins()->InterpreterEntryTrampoline());

  Generate_EnterBytecodeDispatch(masm);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r0 - contains return address (beginning of patch sequence)
  //   r1 - isolate
  //   r3 - new target
  FrameScope scope(masm, StackFrame::MANUAL);
  __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
  __ PrepareCallCFunction(2, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
  __ mov(pc, r0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
    MacroAssembler* masm) { \
  GenerateMakeCodeYoungAgainCommon(masm); \
} \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
    MacroAssembler* masm) { \
  GenerateMakeCodeYoungAgainCommon(masm); \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r0 - contains return address (beginning of patch sequence)
  //   r1 - isolate
  //   r3 - new target
  FrameScope scope(masm, StackFrame::MANUAL);
  __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
  __ PrepareCallCFunction(2, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(ExternalReference::get_mark_code_as_executed_function(
      masm->isolate()), 2);
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());

  // Perform prologue operations usually performed by the young code stub.
  __ PushFixedFrame(r1);
  __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ add(r0, r0, Operand(kNoCodeAgeSequenceLength));
  __ mov(pc, r0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
  }

  __ add(sp, sp, Operand(kPointerSize));  // Ignore state
  __ mov(pc, lr);  // Jump to miss handler
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(r0);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> r6.
  __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r6);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
  __ b(ne, &with_tos_register);
  __ add(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
  __ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
  __ b(ne, &unknown_state);
  __ add(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001290static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
1291 Register function_template_info,
1292 Register scratch0, Register scratch1,
1293 Register scratch2,
1294 Label* receiver_check_failed) {
1295 Register signature = scratch0;
1296 Register map = scratch1;
1297 Register constructor = scratch2;
1298
1299 // If there is no signature, return the holder.
1300 __ ldr(signature, FieldMemOperand(function_template_info,
1301 FunctionTemplateInfo::kSignatureOffset));
1302 __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
1303 Label receiver_check_passed;
1304 __ b(eq, &receiver_check_passed);
1305
1306 // Walk the prototype chain.
1307 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1308 Label prototype_loop_start;
1309 __ bind(&prototype_loop_start);
1310
1311 // Get the constructor, if any.
1312 __ GetMapConstructor(constructor, map, ip, ip);
1313 __ cmp(ip, Operand(JS_FUNCTION_TYPE));
1314 Label next_prototype;
1315 __ b(ne, &next_prototype);
1316 Register type = constructor;
1317 __ ldr(type,
1318 FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
1319 __ ldr(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
1320
1321 // Loop through the chain of inheriting function templates.
1322 Label function_template_loop;
1323 __ bind(&function_template_loop);
1324
1325 // If the signatures match, we have a compatible receiver.
1326 __ cmp(signature, type);
1327 __ b(eq, &receiver_check_passed);
1328
1329 // If the current type is not a FunctionTemplateInfo, load the next prototype
1330 // in the chain.
1331 __ JumpIfSmi(type, &next_prototype);
1332 __ CompareObjectType(type, ip, ip, FUNCTION_TEMPLATE_INFO_TYPE);
1333
1334 // Otherwise load the parent function template and iterate.
1335 __ ldr(type,
1336 FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset),
1337 eq);
1338 __ b(&function_template_loop, eq);
1339
1340 // Load the next prototype.
1341 __ bind(&next_prototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001342 __ ldr(ip, FieldMemOperand(map, Map::kBitField3Offset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001343 __ tst(ip, Operand(Map::HasHiddenPrototype::kMask));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001344 __ b(eq, receiver_check_failed);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001345 __ ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
1346 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001347 // Iterate.
1348 __ b(&prototype_loop_start);
1349
1350 __ bind(&receiver_check_passed);
1351}
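
// For context, the signature checked above is installed through the embedder
// API, roughly as in the following hypothetical snippet (all names here are
// illustrative, not taken from this file):
//
//   v8::Local<v8::FunctionTemplate> receiver_templ = ...;
//   v8::Local<v8::Signature> sig =
//       v8::Signature::New(isolate, receiver_templ);
//   v8::Local<v8::FunctionTemplate> fn_templ =
//       v8::FunctionTemplate::New(isolate, callback, data, sig);
//
// A call through fn_templ then requires a receiver whose (hidden) prototype
// chain contains an object instantiated from receiver_templ; otherwise the
// check above fails and an exception is raised.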


void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : callee
  //  -- lr                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  CompatibleReceiverCheck(masm, r2, r3, r4, r5, r6, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ ldr(r4, FieldMemOperand(r3, FunctionTemplateInfo::kCallCodeOffset));
  __ ldr(r4, FieldMemOperand(r4, CallHandlerInfo::kFastHandlerOffset));
  __ add(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r4);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver).
  __ add(r0, r0, Operand(1));
  __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ cmp(r0, Operand(Smi::FromInt(0)));
  __ b(ne, &skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));

  {
    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start.

    if (FLAG_enable_embedded_constant_pool) {
      __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r0);
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ ldr(r1, FieldMemOperand(
                   r1, FixedArray::OffsetOfElementAt(
                           DeoptimizationInputData::kOsrPcOffsetIndex)));

    // Compute the target address = code start + osr_offset.
    __ add(lr, r0, Operand::SmiUntag(r1));

    // And "return" to the OSR entry point of the function.
    __ Ret();
  }
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as an indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}


// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : receiver
  // -----------------------------------

  // 1. Pop receiver into r0 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(r0);
    __ JumpIfSmi(r0, &receiver_not_date);
    __ CompareObjectType(r0, r1, r2, JS_DATE_TYPE);
    __ b(ne, &receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as
  //    necessary.
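  // Fields below kFirstUncachedField are cached on the JSDate object itself
  // and are only valid while the object's cache stamp matches the
  // isolate-wide date cache stamp; that stamp is advanced whenever the date
  // cache is reset (e.g. after a timezone change), which forces the slow
  // C-function path below.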
  if (field_index == JSDate::kDateValue) {
    __ ldr(r0, FieldMemOperand(r0, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      Label stamp_mismatch;
      __ mov(r1, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
      __ ldr(r1, MemOperand(r1));
      __ ldr(ip, FieldMemOperand(r0, JSDate::kCacheStampOffset));
      __ cmp(r1, ip);
      __ b(ne, &stamp_mismatch);
      __ ldr(r0, FieldMemOperand(
                     r0, JSDate::kValueOffset + field_index * kPointerSize));
      __ Ret();
      __ bind(&stamp_mismatch);
    }
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, r1);
    __ mov(r1, Operand(Smi::FromInt(field_index)));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  __ TailCallRuntime(Runtime::kThrowNotDateError);
}

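
// ES6 section 19.2.3.1 Function.prototype.apply ( thisArg, argArray ).
// Informally, the builtin below implements the following sketch (receiver
// and callability checks elided):
//
//   function apply(thisArg, argArray) {
//     if (argArray === null || argArray === undefined) {
//       return this.call(thisArg);  // tail call with no arguments
//     }
//     return this.call(thisArg, ...argArray);
//   }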
// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into r1, argArray into r0 (if present), remove all
  //    arguments from the stack (including the receiver), and push thisArg
  //    (if present) instead.
  {
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ mov(r3, r2);
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));  // receiver
    __ sub(r4, r0, Operand(1), SetCC);
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // thisArg
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argArray
    __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(sp, 0));
    __ mov(r0, r3);
  }

  // ----------- S t a t e -------------
  //  -- r0    : argArray
  //  -- r1    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(r1, &receiver_not_callable);
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsCallable));
  __ b(eq, &receiver_not_callable);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(r0, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(r0, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  //     new.target).
  __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  //     arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ mov(r0, Operand(0));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ str(r1, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

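
// ES6 section 19.2.3.3 Function.prototype.call ( thisArg, ...args ).
// The receiver slot is recycled below: thisArg (the first argument, if any)
// becomes the new receiver, and the remaining arguments are shifted one slot
// toward it, so no new stack space is needed.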
// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  {
    Label done;
    __ cmp(r0, Operand::Zero());
    __ b(ne, &done);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  // r0: actual number of arguments
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));

  // 3. Shift arguments one slot down on the stack (overwriting the original
  //    receiver), and adjust the argument count to make the original first
  //    argument the new receiver.
  // r0: actual number of arguments
  // r1: callable
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));

    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, -kPointerSize));
    __ str(ip, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
    __ cmp(r2, sp);
    __ b(ne, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

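
// ES6 section 26.1.1 Reflect.apply ( target, thisArgument, argumentsList ).
// Unlike Function.prototype.apply, the target is an explicit argument here,
// and a null or undefined argumentsList is not special-cased.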
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r1 (if present), argumentsList into r0 (if present),
  //    remove all arguments from the stack (including the receiver), and push
  //    thisArgument (if present) instead.
  {
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ mov(r2, r1);
    __ mov(r3, r1);
    __ sub(r4, r0, Operand(1), SetCC);
    __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // target
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // thisArgument
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argumentsList
    __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(sp, 0));
    __ mov(r0, r3);
  }

  // ----------- S t a t e -------------
  //  -- r0    : argumentsList
  //  -- r1    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(r1, &target_not_callable);
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsCallable));
  __ b(eq, &target_not_callable);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  //     new.target).
  __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ str(r1, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

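
// ES6 section 26.1.2 Reflect.construct ( target, argumentsList [, newTarget] ).
// When newTarget is omitted it defaults to target, which the register setup
// below implements by initializing r3 from r1 before the optional load.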
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r1 (if present), argumentsList into r0 (if present),
  //    new.target into r3 (if present, otherwise use target), remove all
  //    arguments from the stack (including the receiver), and push undefined
  //    as the new receiver instead.
  {
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ mov(r2, r1);
    __ str(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));  // receiver
    __ sub(r4, r0, Operand(1), SetCC);
    __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // target
    __ mov(r3, r1);  // new.target defaults to target
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argumentsList
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // new.target
    __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ mov(r0, r2);
  }

  // ----------- S t a t e -------------
  //  -- r0    : argumentsList
  //  -- r1    : target
  //  -- r3    : new.target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(r1, &target_not_constructor);
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsConstructor));
  __ b(eq, &target_not_constructor);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(r3, &new_target_not_constructor);
  __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsConstructor));
  __ b(eq, &new_target_not_constructor);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ str(r1, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ str(r3, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  //  -- r3 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  __ cmp(r5, Operand(r2, LSL, kPointerSizeLog2));
  __ b(le, stack_overflow);  // Signed comparison.
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r0);
  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
                       (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                       fp.bit() | lr.bit());
  __ add(fp, sp,
         Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                              kPointerSize)));

  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR);
  __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
  __ add(sp, sp, Operand(kPointerSize));  // Adjust for receiver.
}


// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : argumentsList
  //  -- r1    : target
  //  -- r3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

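  // Three sources of arguments are handled below: unmodified sloppy/strict
  // arguments objects (whose elements can be used directly as long as the
  // length matches the backing store), fast-mode JSArrays, and, for
  // everything else, a generic runtime call that materializes a FixedArray
  // via CreateListFromArrayLike.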
  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(r0, &create_runtime);

    // Load the map of argumentsList into r2.
    __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));

    // Load native context into r4.
    __ ldr(r4, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ ldr(ip, ContextMemOperand(r4, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r2);
    __ b(eq, &create_arguments);
    __ ldr(ip, ContextMemOperand(r4, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r2);
    __ b(eq, &create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(r2, ip, JS_ARRAY_TYPE);
    __ b(eq, &create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r1, r3, r0);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(r1, r3);
      __ ldr(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));
      __ SmiUntag(r2);
    }
    __ jmp(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ ldr(r2, FieldMemOperand(r0, JSArgumentsObject::kLengthOffset));
    __ ldr(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
    __ ldr(ip, FieldMemOperand(r4, FixedArray::kLengthOffset));
    __ cmp(r2, ip);
    __ b(ne, &create_runtime);
    __ SmiUntag(r2);
    __ mov(r0, r4);
    __ b(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ ldr(r2, FieldMemOperand(r2, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(r2);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ cmp(r2, Operand(FAST_ELEMENTS));
    __ b(hi, &create_runtime);
    __ cmp(r2, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ b(eq, &create_runtime);
    __ ldr(r2, FieldMemOperand(r0, JSArray::kLengthOffset));
    __ ldr(r0, FieldMemOperand(r0, JSArray::kElementsOffset));
    __ SmiUntag(r2);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
    // Make ip the space we have left. The stack might already be overflowed
    // here which will cause ip to become negative.
    __ sub(ip, sp, ip);
    // Check if the arguments will overflow the stack.
    __ cmp(ip, Operand(r2, LSL, kPointerSizeLog2));
    __ b(gt, &done);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- r1    : target
  //  -- r0    : args (a FixedArray built from argumentsList)
  //  -- r2    : len (number of elements to push from args)
  //  -- r3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    __ mov(r4, Operand(0));
    Label done, loop;
    __ bind(&loop);
    __ cmp(r4, r2);
    __ b(eq, &done);
    __ add(ip, r0, Operand(r4, LSL, kPointerSizeLog2));
    __ ldr(ip, FieldMemOperand(ip, FixedArray::kHeaderSize));
    __ Push(ip);
    __ add(r4, r4, Operand(1));
    __ b(&loop);
    __ bind(&done);
    __ Move(r0, r4);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}

namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if the debugger is not active.
  Label done;
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(masm->isolate());
  __ mov(scratch1, Operand(debug_is_active));
  __ ldrb(scratch1, MemOperand(scratch1));
  __ cmp(scratch1, Operand(0));
  __ b(ne, &done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ ldr(scratch3, MemOperand(fp, StandardFrameConstants::kMarkerOffset));
    __ cmp(scratch3, Operand(Smi::FromInt(StackFrame::STUB)));
    __ b(ne, &no_interpreter_frame);
    __ ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(scratch3,
         MemOperand(scratch2, StandardFrameConstants::kContextOffset));
  __ cmp(scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(ne, &no_arguments_adaptor);

  // Drop arguments adaptor frame and load arguments count.
  __ mov(fp, scratch2);
  __ ldr(scratch1,
         MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(scratch1);
  __ b(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count.
  __ ldr(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(scratch1,
         FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(scratch1,
         FieldMemOperand(scratch1,
                         SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(scratch1);

  __ bind(&formal_parameter_count_loaded);

  // Calculate the end of destination area where we will put the arguments
  // after we drop current frame. We add kPointerSize to count the receiver
  // argument which is not included into formal parameters count.
  Register dst_reg = scratch2;
  __ add(dst_reg, fp, Operand(scratch1, LSL, kPointerSizeLog2));
  __ add(dst_reg, dst_reg,
         Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize));

  Register src_reg = scratch1;
  __ add(src_reg, sp, Operand(args_reg, LSL, kPointerSizeLog2));
  // Count receiver argument as well (not included in args_reg).
  __ add(src_reg, src_reg, Operand(kPointerSize));

  if (FLAG_debug_code) {
    __ cmp(src_reg, dst_reg);
    __ Check(lo, kStackAccessBelowStackPointer);
  }

  // Restore caller's frame pointer and return address now as they will be
  // overwritten by the copying loop.
  __ ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
  __ ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).

  // Both src_reg and dst_reg are pointing to the word after the one to copy,
  // so they must be pre-decremented in the loop.
  Register tmp_reg = scratch3;
  Label loop, entry;
  __ b(&entry);
  __ bind(&loop);
  __ ldr(tmp_reg, MemOperand(src_reg, -kPointerSize, PreIndex));
  __ str(tmp_reg, MemOperand(dst_reg, -kPointerSize, PreIndex));
  __ bind(&entry);
  __ cmp(sp, src_reg);
  __ b(ne, &loop);

  // Leave current frame.
  __ mov(sp, dst_reg);

  __ bind(&done);
}
}  // namespace

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(r1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kFunctionKindByteOffset));
  __ tst(r3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ b(ne, &class_constructor);

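  // ES6 section 9.2.1.2 OrdinaryCallBindThis: for sloppy-mode, non-native
  // functions a primitive receiver must be boxed via ToObject, and null or
  // undefined must be replaced with the global proxy, which is what the
  // conversion block below implements.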
  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kNativeByteOffset));
  __ tst(r3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                     (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ b(ne, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- r0 : the number of arguments (not including the receiver)
    //  -- r1 : the function to call (checked to be a JSFunction)
    //  -- r2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r3);
    } else {
      Label convert_to_object, convert_receiver;
      __ ldr(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
      __ JumpIfSmi(r3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
      __ b(hs, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(r3, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(r3);
        }
        __ b(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the
        // frame in the fast case? (fall back to AllocateInNewSpace?)
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(r0);
        __ Push(r0, r1);
        __ mov(r0, r3);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ mov(r3, r0);
        __ Pop(r0, r1);
        __ SmiUntag(r0);
      }
      __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ str(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSFunction)
  //  -- r2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r0, r3, r4, r5);
  }

  __ ldr(r2,
         FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(r2);
  ParameterCount actual(r0);
  ParameterCount expected(r2);
  __ InvokeFunctionCode(r1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ push(r1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}


namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : target (checked to be a JSBoundFunction)
  //  -- r3 : new.target (only in case of [[Construct]])
  // -----------------------------------

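  // Worked example: for f_bound = f.bind(b, x, y) invoked as f_bound(z), the
  // stack below holds the receiver b and the single argument z on entry;
  // on exit it holds b, x, y, z, i.e. the bound arguments are spliced in
  // between the receiver and the call-site arguments.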
  // Load [[BoundArguments]] into r2 and length of that into r4.
  Label no_bound_arguments;
  __ ldr(r2, FieldMemOperand(r1, JSBoundFunction::kBoundArgumentsOffset));
  __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
  __ SmiUntag(r4);
  __ cmp(r4, Operand(0));
  __ b(eq, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- r0 : the number of arguments (not including the receiver)
    //  -- r1 : target (checked to be a JSBoundFunction)
    //  -- r2 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- r3 : new.target (only in case of [[Construct]])
    //  -- r4 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ sub(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
      __ b(gt, &done);  // Signed comparison.
      // Restore the stack pointer.
      __ add(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Relocate arguments down the stack.
    {
      Label loop, done_loop;
      __ mov(r5, Operand(0));
      __ bind(&loop);
      __ cmp(r5, r0);
      __ b(gt, &done_loop);
      __ ldr(ip, MemOperand(sp, r4, LSL, kPointerSizeLog2));
      __ str(ip, MemOperand(sp, r5, LSL, kPointerSizeLog2));
      __ add(r4, r4, Operand(1));
      __ add(r5, r5, Operand(1));
      __ b(&loop);
      __ bind(&done_loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
      __ SmiUntag(r4);
      __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ bind(&loop);
      __ sub(r4, r4, Operand(1), SetCC);
      __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2));
      __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));
      __ add(r0, r0, Operand(1));
      __ b(gt, &loop);
    }
  }
  __ bind(&no_bound_arguments);
}

}  // namespace


// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(r1);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r0, r3, r4, r5);
  }

  // Patch the receiver to [[BoundThis]].
  __ ldr(ip, FieldMemOperand(r1, JSBoundFunction::kBoundThisOffset));
  __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                       masm->isolate())));
  __ ldr(ip, MemOperand(ip));
  __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
}


// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(r1, &non_callable);
  __ bind(&non_smi);
  __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsCallable));
  __ b(eq, &non_callable);

  __ cmp(r5, Operand(JS_PROXY_TYPE));
  __ b(ne, &non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r0, r3, r4, r5);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(r1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ add(r0, r0, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}


// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (checked to be a JSFunction)
  //  -- r3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(r1);

  // Calling convention for function specific ConstructStubs require
  // r2 to contain either an AllocationSite or undefined.
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset));
  __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
}


// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSBoundFunction)
  //  -- r3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(r1);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  __ cmp(r1, r3);
  __ ldr(r3, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset),
         eq);

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ ldr(ip, MemOperand(ip));
  __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
}


// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (checked to be a JSProxy)
  //  -- r3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(r1);
  __ Push(r3);
  // Include the pushed new_target, constructor and the receiver.
  __ add(r0, r0, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}


// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (can be any Object)
  //  -- r3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(r1, &non_constructor);

  // Dispatch based on instance type.
  __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  __ ldrb(r2, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r2, Operand(1 << Map::kIsConstructor));
  __ b(eq, &non_constructor);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ cmp(r5, Operand(JS_PROXY_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  //  -- r3 : new target (passed through to callee)
  // -----------------------------------

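  // The adaptor frame bridges a mismatch between the actual and the expected
  // argument count: missing arguments are filled in with undefined, surplus
  // arguments remain on the stack but are invisible to the callee, and the
  // whole adaptor frame is torn down again on return.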
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002433 Label invoke, dont_adapt_arguments, stack_overflow;
Steve Blocka7e24c12009-10-30 11:49:00 +00002434
2435 Label enough, too_few;
Steve Block6ded16b2010-05-10 14:33:55 +01002436 __ cmp(r0, r2);
Steve Blocka7e24c12009-10-30 11:49:00 +00002437 __ b(lt, &too_few);
2438 __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
2439 __ b(eq, &dont_adapt_arguments);
2440
2441 { // Enough parameters: actual >= expected
2442 __ bind(&enough);
2443 EnterArgumentsAdaptorFrame(masm);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002444 ArgumentAdaptorStackCheck(masm, &stack_overflow);
Steve Blocka7e24c12009-10-30 11:49:00 +00002445
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002446 // Calculate copy start address into r0 and copy end address into r4.
Steve Blocka7e24c12009-10-30 11:49:00 +00002447 // r0: actual number of arguments as a smi
2448 // r1: function
2449 // r2: expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002450 // r3: new target (passed through to callee)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002451 __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002452 // adjust for return address and receiver
2453 __ add(r0, r0, Operand(2 * kPointerSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002454 __ sub(r4, r0, Operand(r2, LSL, kPointerSizeLog2));
Steve Blocka7e24c12009-10-30 11:49:00 +00002455
2456 // Copy the arguments (including the receiver) to the new stack frame.
2457 // r0: copy start address
2458 // r1: function
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002459 // r2: expected number of arguments
2460 // r3: new target (passed through to callee)
2461 // r4: copy end address
Steve Blocka7e24c12009-10-30 11:49:00 +00002462
2463 Label copy;
2464 __ bind(&copy);
2465 __ ldr(ip, MemOperand(r0, 0));
2466 __ push(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002467 __ cmp(r0, r4); // Compare before moving to next argument.
Steve Blocka7e24c12009-10-30 11:49:00 +00002468 __ sub(r0, r0, Operand(kPointerSize));
2469 __ b(ne, &copy);
2470
2471 __ b(&invoke);
2472 }
2473
2474 { // Too few parameters: Actual < expected
2475 __ bind(&too_few);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002476
2477 // If the function is strong we need to throw an error.
2478 Label no_strong_error;
2479 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
2480 __ ldr(r5, FieldMemOperand(r4, SharedFunctionInfo::kCompilerHintsOffset));
2481 __ tst(r5, Operand(1 << (SharedFunctionInfo::kStrongModeFunction +
2482 kSmiTagSize)));
2483 __ b(eq, &no_strong_error);
2484
2485 // What we really care about is the required number of arguments.
2486 __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kLengthOffset));
2487 __ cmp(r0, Operand::SmiUntag(r4));
2488 __ b(ge, &no_strong_error);
2489
2490 {
2491 FrameScope frame(masm, StackFrame::MANUAL);
2492 EnterArgumentsAdaptorFrame(masm);
2493 __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments);
2494 }
2495
2496 __ bind(&no_strong_error);
Steve Blocka7e24c12009-10-30 11:49:00 +00002497 EnterArgumentsAdaptorFrame(masm);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002498 ArgumentAdaptorStackCheck(masm, &stack_overflow);
Steve Blocka7e24c12009-10-30 11:49:00 +00002499
2500 // Calculate copy start address into r0 and copy end address is fp.
2501 // r0: actual number of arguments as a smi
2502 // r1: function
2503 // r2: expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002504 // r3: new target (passed through to callee)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002505 __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
Steve Blocka7e24c12009-10-30 11:49:00 +00002506
2507 // Copy the arguments (including the receiver) to the new stack frame.
2508 // r0: copy start address
2509 // r1: function
2510 // r2: expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002511 // r3: new target (passed through to callee)
Steve Blocka7e24c12009-10-30 11:49:00 +00002512 Label copy;
2513 __ bind(&copy);
2514 // Adjust load for return address and receiver.
2515 __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
2516 __ push(ip);
2517 __ cmp(r0, fp); // Compare before moving to next argument.
2518 __ sub(r0, r0, Operand(kPointerSize));
2519 __ b(ne, &copy);
2520
2521 // Fill the remaining expected arguments with undefined.
2522 // r1: function
2523 // r2: expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002524 // r3: new target (passed through to callee)
Steve Blocka7e24c12009-10-30 11:49:00 +00002525 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002526 __ sub(r4, fp, Operand(r2, LSL, kPointerSizeLog2));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002527 // Adjust for frame.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002528 __ sub(r4, r4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002529 2 * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00002530
2531 Label fill;
2532 __ bind(&fill);
2533 __ push(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002534 __ cmp(sp, r4);
Steve Blocka7e24c12009-10-30 11:49:00 +00002535 __ b(ne, &fill);
2536 }
2537
2538 // Call the entry point.
2539 __ bind(&invoke);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002540 __ mov(r0, r2);
2541 // r0 : expected number of arguments
2542 // r1 : function (passed through to callee)
2543 // r3 : new target (passed through to callee)
2544 __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
2545 __ Call(r4);
Steve Blocka7e24c12009-10-30 11:49:00 +00002546
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002547 // Store offset of return address for deoptimizer.
2548 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2549
Steve Blocka7e24c12009-10-30 11:49:00 +00002550 // Exit frame and return.
2551 LeaveArgumentsAdaptorFrame(masm);
2552 __ Jump(lr);
2553
2554
2555 // -------------------------------------------
2556 // Dont adapt arguments.
2557 // -------------------------------------------
2558 __ bind(&dont_adapt_arguments);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002559 __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
2560 __ Jump(r4);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002561
2562 __ bind(&stack_overflow);
2563 {
2564 FrameScope frame(masm, StackFrame::MANUAL);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002565 __ CallRuntime(Runtime::kThrowStackOverflow);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002566 __ bkpt(0);
2567 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002568}


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM