// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "codegen.h"
#include "debug.h"
#include "deoptimizer.h"
#include "full-codegen.h"
#include "runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)

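// Note on the '__' shorthand used throughout this file: ACCESS_MASM(masm)
// effectively resolves to masm->, so a line such as '__ push(r1);' emits an
// ARM instruction into the MacroAssembler's buffer at code-generation time
// rather than executing anything directly (see macro-assembler.h).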

void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument (argc == r0)
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(r1);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(r0, r0, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the global context.

  __ ldr(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kGlobalContextOffset));
  // Load the Array function from the global context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}
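// Note: the loads above walk cp (current context) -> global object -> global
// context, and the final load fetches the 'Array' constructor that was
// installed in the global context during bootstrapping (roughly the value of
// the ARRAY_FUNCTION_INDEX slot).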


// Allocate an empty JSArray. The allocated array is put into the result
// register. An elements backing store is allocated with size initial_capacity
// and filled with the hole values.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 Label* gc_required) {
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  STATIC_ASSERT(initial_capacity >= 0);
  // Load the initial map from the array function.
  __ ldr(scratch1, FieldMemOperand(array_function,
                                   JSFunction::kPrototypeOrInitialMapOffset));

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.
  int size = JSArray::kSize;
  if (initial_capacity > 0) {
    size += FixedArray::SizeFor(initial_capacity);
  }
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ str(scratch1, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
  __ str(scratch1, FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ mov(scratch3, Operand(0, RelocInfo::NONE));
  __ str(scratch3, FieldMemOperand(result, JSArray::kLengthOffset));

  if (initial_capacity == 0) {
    __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));
    return;
  }

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // scratch2: start of next object
  __ add(scratch1, result, Operand(JSArray::kSize));
  __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  __ sub(scratch1, scratch1, Operand(kHeapObjectTag));

  // Initialize the FixedArray and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array (untagged)
  // scratch2: start of next object
  __ LoadRoot(scratch3, Heap::kFixedArrayMapRootIndex);
  STATIC_ASSERT(0 * kPointerSize == FixedArray::kMapOffset);
  __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
  __ mov(scratch3, Operand(Smi::FromInt(initial_capacity)));
  STATIC_ASSERT(1 * kPointerSize == FixedArray::kLengthOffset);
  __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));

  // Fill the FixedArray with the hole value. Inline the code if short.
  STATIC_ASSERT(2 * kPointerSize == FixedArray::kHeaderSize);
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  static const int kLoopUnfoldLimit = 4;
  if (initial_capacity <= kLoopUnfoldLimit) {
    for (int i = 0; i < initial_capacity; i++) {
      __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
    }
  } else {
    Label loop, entry;
    __ add(scratch2, scratch1, Operand(initial_capacity * kPointerSize));
    __ b(&entry);
    __ bind(&loop);
    __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
    __ bind(&entry);
    __ cmp(scratch1, scratch2);
    __ b(lt, &loop);
  }
}
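// Note: for a non-zero initial_capacity the code above produces one
// contiguous allocation laid out as
//   [JSArray: map | properties | elements | length]
//   [FixedArray: map | length | hole x initial_capacity]
// with the JSArray's elements field pointing at the trailing FixedArray.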

// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register and beginning and end of the FixedArray elements
// storage is put into registers elements_array_storage and elements_array_end
// (see below for when that is not the case). If the parameter fill_with_hole
// is true the allocated elements backing store is filled with the hole values,
// otherwise it is left uninitialized. When the backing store is filled the
// register elements_array_storage is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi, cannot be 0.
                            Register result,
                            Register elements_array_storage,
                            Register elements_array_end,
                            Register scratch1,
                            Register scratch2,
                            bool fill_with_hole,
                            Label* gc_required) {
  // Load the initial map from the array function.
  __ ldr(elements_array_storage,
         FieldMemOperand(array_function,
                         JSFunction::kPrototypeOrInitialMapOffset));

  if (FLAG_debug_code) {  // Assert that array size is not zero.
    __ tst(array_size, array_size);
    __ Assert(ne, "array size is unexpectedly 0");
  }

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested number of elements.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ mov(elements_array_end,
         Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
  __ add(elements_array_end,
         elements_array_end,
         Operand(array_size, ASR, kSmiTagSize));
  __ AllocateInNewSpace(
      elements_array_end,
      result,
      scratch1,
      scratch2,
      gc_required,
      static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array_storage: initial map
  // array_size: size of array (smi)
  __ str(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
  __ str(elements_array_storage,
         FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ str(array_size, FieldMemOperand(result, JSArray::kLengthOffset));

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // array_size: size of array (smi)
  __ add(elements_array_storage, result, Operand(JSArray::kSize));
  __ str(elements_array_storage,
         FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  STATIC_ASSERT(kSmiTag == 0);
  __ sub(elements_array_storage,
         elements_array_storage,
         Operand(kHeapObjectTag));
  // Initialize the fixed array and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // elements_array_storage: elements array (untagged)
  // array_size: size of array (smi)
  __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
  ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
  __ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex));
  STATIC_ASSERT(kSmiTag == 0);
  ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
  __ str(array_size,
         MemOperand(elements_array_storage, kPointerSize, PostIndex));

  // Calculate elements array and elements array end.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // array_size: smi-tagged size of elements array
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
  __ add(elements_array_end,
         elements_array_storage,
         Operand(array_size, LSL, kPointerSizeLog2 - kSmiTagSize));

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // elements_array_end: start of next object
  if (fill_with_hole) {
    Label loop, entry;
    __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
    __ jmp(&entry);
    __ bind(&loop);
    __ str(scratch1,
           MemOperand(elements_array_storage, kPointerSize, PostIndex));
    __ bind(&entry);
    __ cmp(elements_array_storage, elements_array_end);
    __ b(lt, &loop);
  }
}
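// Note: when fill_with_hole is false the elements area is left uninitialized,
// so the caller must store a value into every slot (as ArrayNativeCode below
// does when copying the constructor arguments) before the garbage collector
// can observe the array.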

// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code the runtime is called. This
// function assumes the following state:
//   r0: argc
//   r1: constructor (built-in Array function)
//   lr: return address
//   sp[0]: last argument
// This function is used for both construct and normal calls of Array. The only
// difference between handling a construct call and a normal call is that for a
// construct call the constructor function in r1 needs to be preserved for
// entering the generic code. In both cases argc in r0 needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// construct call and normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label* call_generic_code) {
  Counters* counters = masm->isolate()->counters();
  Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array;

  // Check for array construction with zero arguments or one.
  __ cmp(r0, Operand(0, RelocInfo::NONE));
  __ b(ne, &argc_one_or_more);

  // Handle construction of an empty array.
  __ bind(&empty_array);
  AllocateEmptyJSArray(masm,
                       r1,
                       r2,
                       r3,
                       r4,
                       r5,
                       call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1, r3, r4);
  // Set up return value, remove receiver from stack and return.
  __ mov(r0, r2);
  __ add(sp, sp, Operand(kPointerSize));
  __ Jump(lr);

  // Check for one argument. Bail out if argument is not smi or if it is
  // negative.
  __ bind(&argc_one_or_more);
  __ cmp(r0, Operand(1));
  __ b(ne, &argc_two_or_more);
  STATIC_ASSERT(kSmiTag == 0);
  __ ldr(r2, MemOperand(sp));  // Get the argument from the stack.
  __ tst(r2, r2);
  __ b(ne, &not_empty_array);
  __ Drop(1);  // Adjust stack.
  __ mov(r0, Operand(0));  // Treat this as a call with argc of zero.
  __ b(&empty_array);

  __ bind(&not_empty_array);
  __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
  __ b(ne, call_generic_code);

  // Handle construction of an empty array of a certain size. Bail out if size
  // is too large to actually allocate an elements array.
  STATIC_ASSERT(kSmiTag == 0);
  __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
  __ b(ge, call_generic_code);

  // r0: argc
  // r1: constructor
  // r2: array_size (smi)
  // sp[0]: argument
  AllocateJSArray(masm,
                  r1,
                  r2,
                  r3,
                  r4,
                  r5,
                  r6,
                  r7,
                  true,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1, r2, r4);
  // Set up return value, remove receiver and argument from stack and return.
  __ mov(r0, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Jump(lr);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ mov(r2, Operand(r0, LSL, kSmiTagSize));  // Convert argc to a smi.

  // r0: argc
  // r1: constructor
  // r2: array_size (smi)
  // sp[0]: last argument
  AllocateJSArray(masm,
                  r1,
                  r2,
                  r3,
                  r4,
                  r5,
                  r6,
                  r7,
                  false,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1, r2, r6);

  // Fill arguments as array elements. Copy from the top of the stack (last
  // element) to the array backing store filling it backwards. Note:
  // elements_array_end points after the backing store therefore PreIndex is
  // used when filling the backing store.
  // r0: argc
  // r3: JSArray
  // r4: elements_array storage start (untagged)
  // r5: elements_array_end (untagged)
  // sp[0]: last argument
  Label loop, entry;
  __ jmp(&entry);
  __ bind(&loop);
  __ ldr(r2, MemOperand(sp, kPointerSize, PostIndex));
  __ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
  __ bind(&entry);
  __ cmp(r4, r5);
  __ b(lt, &loop);

  // Remove caller arguments and receiver from the stack, set up return value
  // and return.
  // r0: argc
  // r3: JSArray
  // sp[0]: receiver
  __ add(sp, sp, Operand(kPointerSize));
  __ mov(r0, r3);
  __ Jump(lr);
}
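// Note: the three paths above correspond to the three ways the Array builtin
// is reached from JavaScript: 'Array()' allocates an empty array, 'Array(len)'
// allocates a hole-filled backing store of length len (falling back to the
// generic code for non-smi or overly large lengths), and 'Array(a, b, ...)'
// allocates an array and copies the arguments into it.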


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code if the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);

  Handle<Code> array_code =
      masm->isolate()->builtins()->ArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin and internal
    // Array functions which always have a map.
    // Initial map for the builtin Array function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Handle<Code> generic_construct_stub =
      masm->isolate()->builtins()->JSConstructStubGeneric();
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);

  Register function = r1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
    __ cmp(function, Operand(r2));
    __ Assert(eq, "Unexpected String function");
  }

  // Load the first argument in r0 and get rid of the rest.
  Label no_arguments;
  __ cmp(r0, Operand(0, RelocInfo::NONE));
  __ b(eq, &no_arguments);
  // First argument = sp[(argc - 1) * 4].
  __ sub(r0, r0, Operand(1));
  __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
  // sp now points to args[0]; drop args[0] and the receiver.
  __ Drop(2);

  Register argument = r2;
  Label not_cached, argument_is_string;
  NumberToStringStub::GenerateLookupNumberStringCache(
      masm,
      r0,        // Input.
      argument,  // Result.
      r3,        // Scratch.
      r4,        // Scratch.
      r5,        // Scratch.
      false,     // Is it a Smi?
      &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- r2     : argument converted to string
  //  -- r1     : constructor function
  //  -- lr     : return address
  // -----------------------------------

  Label gc_required;
  __ AllocateInNewSpace(JSValue::kSize,
                        r0,  // Result.
                        r3,  // Scratch.
                        r4,  // Scratch.
                        &gc_required,
                        TAG_OBJECT);

  // Initialising the String Object.
  Register map = r3;
  __ LoadGlobalFunctionInitialMap(function, map, r4);
  if (FLAG_debug_code) {
    __ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(eq, "Unexpected string wrapper instance size");
    __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ cmp(r4, Operand(0, RelocInfo::NONE));
    __ Assert(eq, "Unexpected unused properties of string wrapper");
  }
  __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));

  __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
  __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));

  __ str(argument, FieldMemOperand(r0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(r0, &convert_argument);

  // Is it a String?
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ tst(r3, Operand(kIsNotStringMask));
  __ b(ne, &convert_argument);
  __ mov(argument, r0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  __ b(&argument_is_string);

  // Invoke the conversion builtin and put the result into r2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, r0);
  __ b(&argument_is_string);

  // Load the empty string into r2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kEmptyStringRootIndex);
  __ Drop(1);
  __ b(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}
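// Note: for 'new String(x)' the code above returns a JSValue wrapper object
// whose kValueOffset field holds the string. The argument itself is obtained
// via the number-to-string cache, via the TO_STRING builtin, or used directly
// when it is already a string.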


void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Label slow, non_function_call;
  // Check that the function is not a smi.
  __ JumpIfSmi(r1, &non_function_call);
  // Check that the function is a JSFunction.
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &slow);

  // Jump to the function-specific construct stub.
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kConstructStubOffset));
  __ add(pc, r2, Operand(Code::kHeaderSize - kHeapObjectTag));

  // r0: number of arguments
  // r1: called object
  // r2: object type
  Label do_call;
  __ bind(&slow);
  __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
  __ b(ne, &non_function_call);
  __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
  __ jmp(&do_call);

  __ bind(&non_function_call);
  __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ bind(&do_call);
  // Set expected number of arguments to zero (not changing r0).
  __ mov(r2, Operand(0, RelocInfo::NONE));
  __ SetCallKind(r5, CALL_AS_METHOD);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the two incoming parameters on the stack.
    __ mov(r0, Operand(r0, LSL, kSmiTagSize));
    __ push(r0);  // Smi-tagged arguments count.
    __ push(r1);  // Constructor function.

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ mov(r2, Operand(debug_step_in_fp));
      __ ldr(r2, MemOperand(r2));
      __ tst(r2, r2);
      __ b(ne, &rt_call);
#endif

      // Load the initial map and verify that it is in fact a map.
      // r1: constructor function
      __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(r2, &rt_call);
      __ CompareObjectType(r2, r3, r4, MAP_TYPE);
      __ b(ne, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // r1: constructor function
      // r2: initial map
      __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
      __ b(eq, &rt_call);

      if (count_constructions) {
        Label allocate;
        // Decrease generous allocation count.
        __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
        MemOperand constructor_count =
            FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
        __ ldrb(r4, constructor_count);
        __ sub(r4, r4, Operand(1), SetCC);
        __ strb(r4, constructor_count);
        __ b(ne, &allocate);

        __ Push(r1, r2);

        __ push(r1);  // constructor
        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(r2);
        __ pop(r1);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // r1: constructor function
      // r2: initial map
      __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
      __ AllocateInNewSpace(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // r1: constructor function
      // r2: initial map
      // r3: object size
      // r4: JSObject (not tagged)
      __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(r5, r4);
      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
      ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
      ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

      // Fill all the in-object properties with the appropriate filler.
      // r1: constructor function
      // r2: initial map
      // r3: object size (in words)
      // r4: JSObject (not tagged)
      // r5: First in-object property of JSObject (not tagged)
      __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
      ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
      __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
      if (count_constructions) {
        __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
        __ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
                kBitsPerByte);
        __ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2));
        // r0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ cmp(r0, r6);
          __ Assert(le, "Unexpected number of pre-allocated property fields.");
        }
        __ InitializeFieldsWithFiller(r5, r0, r7);
        // To allow for truncation.
        __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex);
      }
      __ InitializeFieldsWithFiller(r5, r6, r7);

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ add(r4, r4, Operand(kHeapObjectTag));

      // Check if a non-empty properties array is needed. Continue with
      // allocated object if not fall through to runtime call if it is.
      // r1: constructor function
      // r4: JSObject
      // r5: start of next object (not tagged)
      __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
      // The field instance sizes contains both pre-allocated property fields
      // and in-object properties.
      __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
      __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
              kBitsPerByte);
      __ add(r3, r3, Operand(r6));
      __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * kBitsPerByte,
              kBitsPerByte);
      __ sub(r3, r3, Operand(r6), SetCC);

      // Done if no extra properties are to be allocated.
      __ b(eq, &allocated);
      __ Assert(pl, "Property allocation count failed.");

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // r1: constructor
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: start of next object
      __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
      __ AllocateInNewSpace(
          r0,
          r5,
          r6,
          r2,
          &undo_allocation,
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

      // Initialize the FixedArray.
      // r1: constructor
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
      __ mov(r2, r5);
      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
      ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
      __ mov(r0, Operand(r3, LSL, kSmiTagSize));
      __ str(r0, MemOperand(r2, kPointerSize, PostIndex));

      // Initialize the fields to undefined.
      // r1: constructor function
      // r2: First element of FixedArray (not tagged)
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
      ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
      { Label loop, entry;
        if (count_constructions) {
          __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
        } else if (FLAG_debug_code) {
          __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
          __ cmp(r7, r8);
          __ Assert(eq, "Undefined value not loaded.");
        }
        __ b(&entry);
        __ bind(&loop);
        __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
        __ bind(&entry);
        __ cmp(r2, r6);
        __ b(lt, &loop);
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject
      // r1: constructor function
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ add(r5, r5, Operand(kHeapObjectTag));  // Add the heap tag.
      __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated
      // r1: constructor function
      // r4: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated objects unused properties.
      // r4: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(r4, r5);
    }

    // Allocate the new receiver object using the runtime call.
    // r1: constructor function
    __ bind(&rt_call);
    __ push(r1);  // argument for Runtime_NewObject
    __ CallRuntime(Runtime::kNewObject, 1);
    __ mov(r4, r0);

    // Receiver for constructor call allocated.
    // r4: JSObject
    __ bind(&allocated);
    __ push(r4);

    // Push the function and the allocated receiver from the stack.
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(r1, MemOperand(sp, kPointerSize));
    __ push(r1);  // Constructor function.
    __ push(r4);  // Receiver.

    // Reload the number of arguments from the stack.
    // r1: constructor function
    // sp[0]: receiver
    // sp[1]: constructor function
    // sp[2]: receiver
    // sp[3]: constructor function
    // sp[4]: number of arguments (smi-tagged)
    __ ldr(r3, MemOperand(sp, 4 * kPointerSize));

    // Set up pointer to last argument.
    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below.
    __ mov(r0, Operand(r3, LSR, kSmiTagSize));

    // Copy arguments and receiver to the expression stack.
    // r0: number of arguments
    // r2: address of last argument (caller sp)
    // r1: constructor function
    // r3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: constructor function
    // sp[2]: receiver
    // sp[3]: constructor function
    // sp[4]: number of arguments (smi-tagged)
    Label loop, entry;
    __ b(&entry);
    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
    __ push(ip);
    __ bind(&entry);
    __ sub(r3, r3, Operand(2), SetCC);
    __ b(ge, &loop);

    // Call the function.
    // r0: number of arguments
    // r1: constructor function
    if (is_api_function) {
      __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      ParameterCount expected(0);
      __ InvokeCode(code, expected, expected,
                    RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Pop the function from the stack.
    // sp[0]: constructor function
    // sp[2]: receiver
    // sp[3]: constructor function
    // sp[4]: number of arguments (smi-tagged)
    __ pop();

    // Restore context from the frame.
    // r0: result
    // sp[0]: receiver
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // r0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(r0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ CompareObjectType(r0, r3, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ ldr(r0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // r0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  __ Jump(lr);
}
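// Note: in the epilogue above r1 holds the smi-tagged argument count, so
// 'LSL, kPointerSizeLog2 - 1' scales it straight to a byte offset (a smi is
// the value shifted left by kSmiTagSize == 1); the extra kPointerSize drops
// the receiver.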


void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r7, cp may be clobbered

  // Clear the context before we push it when entering the internal frame.
  __ mov(cp, Operand(0, RelocInfo::NONE));

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    // Set up the roots register.
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(masm->isolate());
    __ mov(r10, Operand(roots_array_start));

    // Push the function and the receiver onto the stack.
    __ push(r1);
    __ push(r2);

    // Copy arguments to the stack in a loop.
    // r1: function
    // r3: argc
    // r4: argv, i.e. points to first arg
    Label loop, entry;
    __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
    // r2 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
    __ ldr(r0, MemOperand(r0));  // dereference handle
    __ push(r0);  // push parameter
    __ bind(&entry);
    __ cmp(r4, r2);
    __ b(ne, &loop);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mov(r5, Operand(r4));
    __ mov(r6, Operand(r4));
    __ mov(r7, Operand(r4));
    if (kR9Available == 1) {
      __ mov(r9, Operand(r4));
    }

    // Invoke the code and pass argc as r0.
    __ mov(r0, Operand(r3));
    if (is_construct) {
      __ Call(masm->isolate()->builtins()->JSConstructCall());
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }
    // Exit the JS frame and remove the parameters (except function), and
    // return.
    // Respect ABI stack constraint.
  }
  __ Jump(lr);

  // r0: result
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve the function.
    __ push(r1);
    // Push call kind information.
    __ push(r5);

    // Push the function on the stack as the argument to the runtime function.
    __ push(r1);
    __ CallRuntime(Runtime::kLazyCompile, 1);
    // Calculate the entry point.
    __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

    // Restore call kind information.
    __ pop(r5);
    // Restore saved function.
    __ pop(r1);

    // Tear down internal frame.
  }

  // Do a tail-call of the compiled function.
  __ Jump(r2);
}
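// Note: Generate_LazyCompile (above) and Generate_LazyRecompile (below) share
// the same shape: preserve r1 (the function) and r5 (the call kind), call into
// the runtime to (re)compile, and tail-call the returned code object at its
// entry point, Code::kHeaderSize past the tagged code pointer.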


void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve the function.
    __ push(r1);
    // Push call kind information.
    __ push(r5);

    // Push the function on the stack as the argument to the runtime function.
    __ push(r1);
    __ CallRuntime(Runtime::kLazyRecompile, 1);
    // Calculate the entry point.
    __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

    // Restore call kind information.
    __ pop(r5);
    // Restore saved function.
    __ pop(r1);

    // Tear down internal frame.
  }

  // Do a tail-call of the compiled function.
  __ Jump(r2);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(r0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> r6.
  __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r6);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
  __ b(ne, &with_tos_register);
  __ add(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
  __ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
  __ b(ne, &unknown_state);
  __ add(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}
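// Note: the full codegen state checked above records whether the continuation
// expects a value in r0 (the accumulator): NO_REGISTERS means only the state
// word has to be dropped from the stack, TOS_REG means the saved top-of-stack
// value must be reloaded into r0 as well.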


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  // For now, we are relying on the fact that Runtime::NotifyOSR
  // doesn't do any garbage collection which allows us to save/restore
  // the registers without worrying about which of them contain
  // pointers. This seems a bit fragile.
  __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyOSR, 0);
  }
  __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
  __ Ret();
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  CpuFeatures::TryForceFeatureScope scope(VFP3);
  if (!CpuFeatures::IsSupported(VFP3)) {
    __ Abort("Unreachable code: Cannot optimize without VFP3 support.");
    return;
  }

  // Lookup the function in the JavaScript frame and push it as an
  // argument to the on-stack replacement function.
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(r0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the result was -1 it means that we couldn't optimize the
  // function. Just return and continue in the unoptimized version.
  Label skip;
  __ cmp(r0, Operand(Smi::FromInt(-1)));
  __ b(ne, &skip);
  __ Ret();

  __ bind(&skip);
  // Untag the AST id and push it on the stack.
  __ SmiUntag(r0);
  __ push(r0);

  // Generate the code for doing the frame-to-frame translation using
  // the deoptimizer infrastructure.
  Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
  generator.Generate();
}
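// Note: Runtime::kCompileForOnStackReplacement returns a smi. A value of -1
// means optimization failed and the unoptimized frame simply continues;
// otherwise the value is the AST id of the loop to enter, which is untagged
// and handed to the deoptimizer's OSR entry generator to build the optimized
// frame.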
1237
1238
Steve Blocka7e24c12009-10-30 11:49:00 +00001239void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1240 // 1. Make sure we have at least one argument.
Andrei Popescu402d9372010-02-26 13:31:12 +00001241 // r0: actual number of arguments
Steve Blocka7e24c12009-10-30 11:49:00 +00001242 { Label done;
1243 __ tst(r0, Operand(r0));
1244 __ b(ne, &done);
1245 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
1246 __ push(r2);
1247 __ add(r0, r0, Operand(1));
1248 __ bind(&done);
1249 }
1250
Andrei Popescu402d9372010-02-26 13:31:12 +00001251 // 2. Get the function to call (passed as receiver) from the stack, check
1252 // if it is a function.
1253 // r0: actual number of arguments
Ben Murdoch589d6972011-11-30 16:04:58 +00001254 Label slow, non_function;
Andrei Popescu402d9372010-02-26 13:31:12 +00001255 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001256 __ JumpIfSmi(r1, &non_function);
Andrei Popescu402d9372010-02-26 13:31:12 +00001257 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
Ben Murdoch589d6972011-11-30 16:04:58 +00001258 __ b(ne, &slow);
Steve Blocka7e24c12009-10-30 11:49:00 +00001259
  // 3a. Patch the first argument if necessary when calling a function.
  // r0: actual number of arguments
  // r1: function
  Label shift_arguments;
  __ mov(r4, Operand(0, RelocInfo::NONE));  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Do not transform the receiver for native functions (the compiler
    // hints are already in r3).
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Compute the receiver in non-strict mode.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ ldr(r2, MemOperand(r2, -kPointerSize));
    // r0: actual number of arguments
    // r1: function
    // r2: first argument
    __ JumpIfSmi(r2, &convert_to_object);

    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r3, Heap::kNullValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &shift_arguments);
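    // The checks above implement the usual non-strict receiver coercion,
    // roughly (a sketch in JS terms):
    //   receiver == null || receiver == undefined ? globalReceiver
    //                                             : Object(receiver)
    // A receiver that is already a spec object is kept as-is; smis and other
    // primitives fall through to the ToObject call below.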

    __ bind(&convert_to_object);

    {
      // Enter an internal frame in order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ mov(r0, Operand(r0, LSL, kSmiTagSize));  // Smi-tagged.
      __ push(r0);

      __ push(r2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(r2, r0);

      __ pop(r0);
      __ mov(r0, Operand(r0, ASR, kSmiTagSize));

      // Exit the internal frame.
    }
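    // Note on the tagging dance above: everything inside an internal frame
    // has to look like a tagged value to the GC, so the raw argument count
    // is smi-tagged before being pushed across the TO_OBJECT call and
    // untagged again right after it is popped.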

    // Restore the function to r1, and the flag to r4.
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    __ mov(r4, Operand(0, RelocInfo::NONE));
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ ldr(r2, FieldMemOperand(cp, kGlobalIndex));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
    __ ldr(r2, FieldMemOperand(r2, kGlobalIndex));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(r3, -kPointerSize));

    __ jmp(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ mov(r4, Operand(1, RelocInfo::NONE));  // indicate function proxy
  __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
  __ b(eq, &shift_arguments);
  __ bind(&non_function);
  __ mov(r4, Operand(2, RelocInfo::NONE));  // indicate non-function

  // 3c. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ str(r1, MemOperand(r2, -kPointerSize));

  // 4. Shift arguments one slot toward the receiver slot on the stack
  //    (overwriting the original receiver).  Adjust the argument count to
  //    make the original first argument the new receiver.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));

    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, -kPointerSize));
    __ str(ip, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
    __ cmp(r2, sp);
    __ b(ne, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }
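  // Worked example of the shift, as a sketch, for f.call(obj, a, b), i.e.
  // r0 == 3 on entry and the first-argument slot already patched to hold the
  // (possibly coerced) obj:
  //   before the loop: sp -> [ b, a, obj, f ]   (f in the receiver slot)
  //   after the loop : sp -> [ b, b, a, obj ]
  //   after the pop  : sp -> [ b, a, obj ], r0 == 2
  // which is exactly the layout for calling f with receiver obj and
  // arguments (a, b).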

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ tst(r4, r4);
    __ b(eq, &function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(r2, Operand(0, RelocInfo::NONE));
    __ SetCallKind(r5, CALL_AS_METHOD);
    __ cmp(r4, Operand(1));
    __ b(ne, &non_proxy);

    __ push(r1);  // re-add proxy object as additional argument
    __ add(r0, r0, Operand(1));
    __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register r3 without checking arguments.
  // r0: actual number of arguments
  // r1: function
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ mov(r2, Operand(r2, ASR, kSmiTagSize));
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ SetCallKind(r5, CALL_AS_METHOD);
  __ cmp(r2, r0);  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET,
          ne);

  ParameterCount expected(0);
  __ InvokeCode(r3, expected, expected, JUMP_FUNCTION,
                NullCallWrapper(), CALL_AS_METHOD);
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset    = -5 * kPointerSize;
  const int kLimitOffset    = -4 * kPointerSize;
  const int kArgsOffset     =  2 * kPointerSize;
  const int kRecvOffset     =  3 * kPointerSize;
  const int kFunctionOffset =  4 * kPointerSize;
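  // How these offsets are used, as a sketch: the caller has left the
  // function, the receiver and the arguments array on the stack, so they sit
  // above fp (at +4, +3 and +2 words) once the internal frame below is set
  // up, while the copy limit and the running index are the two slots pushed
  // below fp at the "Push current limit and index" step further down.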

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);

    __ ldr(r0, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(r0);
    __ ldr(r0, MemOperand(fp, kArgsOffset));  // get the args array
    __ push(r0);
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real
    // stack limit" is checked.
    Label okay;
    __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
    // Make r2 the space we have left. The stack might already be overflowed
    // here which will cause r2 to become negative.
    __ sub(r2, sp, r2);
    // Check if the arguments will overflow the stack.
    __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
    __ b(gt, &okay);  // Signed comparison.
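    // The arithmetic of the check, as a sketch: r2 = sp - real_stack_limit
    // is the number of free bytes left (negative if already overflowed),
    // and r0 still holds the smi-tagged argument count, so shifting it left
    // by kPointerSizeLog2 - kSmiTagSize (2 - 1 = 1 on 32-bit ARM) yields the
    // number of bytes the pushed arguments will need.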

    // Out of stack space.
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ push(r1);
    __ push(r0);
    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ push(r0);  // limit
    __ mov(r1, Operand(0, RelocInfo::NONE));  // initial index
    __ push(r1);

    // Get the receiver.
    __ ldr(r0, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a
    // proxy).
    Label push_receiver;
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in r1.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_receiver;
    __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Do not transform the receiver for native functions (the compiler
    // hints are already in r2).
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Compute the receiver in non-strict mode.
    __ JumpIfSmi(r0, &call_to_object);
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_receiver);

    // Check if the receiver is already a JavaScript object.
    // r0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &push_receiver);

    // Convert the receiver to a regular object.
    // r0: receiver
    __ bind(&call_to_object);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ b(&push_receiver);

    // Use the current global receiver object as the receiver.
    __ bind(&use_global_receiver);
    const int kGlobalOffset =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
    __ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    // r0: receiver
    __ bind(&push_receiver);
    __ push(r0);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ ldr(r0, MemOperand(fp, kIndexOffset));
    __ b(&entry);

    // Load the current argument from the arguments array and push it to the
    // stack.
    // r0: current argument index
    __ bind(&loop);
    __ ldr(r1, MemOperand(fp, kArgsOffset));
    __ push(r1);
    __ push(r0);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ push(r0);

    // Advance the index past the element that was just copied.
    __ ldr(r0, MemOperand(fp, kIndexOffset));
    __ add(r0, r0, Operand(1 << kSmiTagSize));
    __ str(r0, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ bind(&entry);
    __ ldr(r1, MemOperand(fp, kLimitOffset));
    __ cmp(r0, r1);
    __ b(ne, &loop);
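    // Each iteration of the loop above is essentially
    // `push(arguments[index])` spelled out through the generic runtime path:
    // Runtime::kGetProperty is called with (arguments object, smi index) and
    // its result is pushed, with the index advanced by 1 << kSmiTagSize so
    // that it stays a valid smi.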

    // Invoke the function.
    Label call_proxy;
    ParameterCount actual(r0);
    __ mov(r0, Operand(r0, ASR, kSmiTagSize));
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &call_proxy);
    __ InvokeFunction(r1, actual, CALL_FUNCTION,
                      NullCallWrapper(), CALL_AS_METHOD);

    frame_scope.GenerateLeaveFrame();
    __ add(sp, sp, Operand(3 * kPointerSize));
    __ Jump(lr);

    // Invoke the function proxy.
    __ bind(&call_proxy);
    __ push(r1);  // add function proxy as last argument
    __ add(r0, r0, Operand(1));
    __ mov(r2, Operand(0, RelocInfo::NONE));
    __ SetCallKind(r5, CALL_AS_METHOD);
    __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Tear down the internal frame and remove function, receiver and args.
  }
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Jump(lr);
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(3 * kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then remove the parameters and the receiver from the stack.
  __ ldr(r1, MemOperand(fp, -3 * kPointerSize));
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
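  // Note on the two adds above, as a sketch: r1 still holds the smi-tagged
  // argument count, so shifting by kPointerSizeLog2 - kSmiTagSize converts
  // it straight into a byte offset, and the extra kPointerSize skips the
  // receiver slot pushed by the caller.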
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : actual number of arguments
  // -- r1 : function (passed through to callee)
  // -- r2 : expected number of arguments
  // -- r3 : code entry to call
  // -- r5 : call kind information
  // -----------------------------------

  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ cmp(r0, r2);
  __ b(lt, &too_few);
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address into r2.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
    // adjust for return address and receiver
    __ add(r0, r0, Operand(2 * kPointerSize));
    __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));
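    // A sketch of the address computation above: r0 enters as a smi, so the
    // shift by kPointerSizeLog2 - kSmiTagSize turns it into actual * 4
    // bytes; adding 2 * kPointerSize steps over the saved fp and lr, which
    // leaves r0 pointing at the receiver slot, and r2 = r0 - expected * 4
    // marks the last slot that will be copied, so the loop below copies the
    // receiver plus the first `expected` arguments into the new frame.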

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: copy end address
    // r3: code entry to call

    Label copy;
    __ bind(&copy);
    __ ldr(ip, MemOperand(r0, 0));
    __ push(ip);
    __ cmp(r0, r2);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    __ b(&invoke);
  }

  {  // Too few parameters: actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate the copy start address into r0; the copy end address is fp.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
    __ push(ip);
    __ cmp(r0, fp);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    // Fill the remaining expected arguments with undefined.
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
    __ sub(r2, r2, Operand(4 * kPointerSize));  // Adjust for frame.
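    // A sketch of what the limit r2 means: the three slots below fp (frame
    // marker, function and smi-tagged argc) plus one slot for the receiver
    // account for the 4 * kPointerSize adjustment, so the fill loop below
    // stops once the new frame holds the receiver plus `expected` argument
    // slots.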

    Label fill;
    __ bind(&fill);
    __ push(ip);
    __ cmp(sp, r2);
    __ b(ne, &fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ Call(r3);

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(r3);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM