// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "codegen.h"
#include "debug.h"
#include "deoptimizer.h"
#include "full-codegen.h"
#include "runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument (argc == r0)
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(r1);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(r0, r0, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the global context.
  __ ldr(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kGlobalContextOffset));
  // Load the Array function from the global context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


// This constant has the same value as JSArray::kPreallocatedArrayElements; if
// JSArray::kPreallocatedArrayElements is changed, the handling of the loop
// unfolding below should be reconsidered.
static const int kLoopUnfoldLimit = 4;


// Allocate an empty JSArray. The allocated array is put into the result
// register. An elements backing store is allocated with size initial_capacity
// and filled with the hole values.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 int initial_capacity,
                                 Label* gc_required) {
  ASSERT(initial_capacity > 0);
  // Load the initial map from the array function.
  __ ldr(scratch1, FieldMemOperand(array_function,
                                   JSFunction::kPrototypeOrInitialMapOffset));

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.
  int size = JSArray::kSize + FixedArray::SizeFor(initial_capacity);
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ str(scratch1, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
  __ str(scratch1, FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ mov(scratch3, Operand(0, RelocInfo::NONE));
  __ str(scratch3, FieldMemOperand(result, JSArray::kLengthOffset));

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // scratch2: start of next object
  __ add(scratch1, result, Operand(JSArray::kSize));
  __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  STATIC_ASSERT(kSmiTag == 0);
  __ sub(scratch1, scratch1, Operand(kHeapObjectTag));

  // Initialize the FixedArray and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array (untagged)
  // scratch2: start of next object
  __ LoadRoot(scratch3, Heap::kFixedArrayMapRootIndex);
  ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
  __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
  __ mov(scratch3, Operand(Smi::FromInt(initial_capacity)));
  ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
  __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));

  // Fill the FixedArray with the hole value.
  ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
  ASSERT(initial_capacity <= kLoopUnfoldLimit);
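  // The capacity is small (at most kLoopUnfoldLimit entries), so the fill is
  // emitted as an unrolled sequence of stores rather than a branch-based loop.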
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  for (int i = 0; i < initial_capacity; i++) {
    __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
  }
}

// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register, and the beginning and end of the FixedArray
// elements storage are put into registers elements_array_storage and
// elements_array_end (see below for when that is not the case). If the
// parameter fill_with_hole is true the allocated elements backing store is
// filled with the hole values; otherwise it is left uninitialized. When the
// backing store is filled the register elements_array_storage is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi.
                            Register result,
                            Register elements_array_storage,
                            Register elements_array_end,
                            Register scratch1,
                            Register scratch2,
                            bool fill_with_hole,
                            Label* gc_required) {
  Label not_empty, allocated;

  // Load the initial map from the array function.
  __ ldr(elements_array_storage,
         FieldMemOperand(array_function,
                         JSFunction::kPrototypeOrInitialMapOffset));

  // Check whether an empty sized array is requested.
  __ tst(array_size, array_size);
  __ b(ne, &not_empty);

  // If an empty array is requested, allocate a small elements array anyway.
  // This keeps the code below free of special casing for the empty array.
  int size = JSArray::kSize +
             FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
  __ AllocateInNewSpace(size,
                        result,
                        elements_array_end,
                        scratch1,
                        gc_required,
                        TAG_OBJECT);
  __ jmp(&allocated);

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested number of elements.
  __ bind(&not_empty);
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
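  // Compute the allocation size in words: the JSArray plus the FixedArray
  // header, plus one word per element. array_size is a smi, hence the ASR by
  // kSmiTagSize to untag it before adding.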
  __ mov(elements_array_end,
         Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
  __ add(elements_array_end,
         elements_array_end,
         Operand(array_size, ASR, kSmiTagSize));
  __ AllocateInNewSpace(
      elements_array_end,
      result,
      scratch1,
      scratch2,
      gc_required,
      static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array_storage: initial map
  // array_size: size of array (smi)
  __ bind(&allocated);
  __ str(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
  __ str(elements_array_storage,
         FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ str(array_size, FieldMemOperand(result, JSArray::kLengthOffset));

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // array_size: size of array (smi)
  __ add(elements_array_storage, result, Operand(JSArray::kSize));
  __ str(elements_array_storage,
         FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  STATIC_ASSERT(kSmiTag == 0);
  __ sub(elements_array_storage,
         elements_array_storage,
         Operand(kHeapObjectTag));
  // Initialize the fixed array and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // elements_array_storage: elements array (untagged)
  // array_size: size of array (smi)
  __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
  ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
  __ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex));
  STATIC_ASSERT(kSmiTag == 0);
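  // Set the condition flags from array_size; the conditional mov below uses
  // eq (array_size == 0) to store the pre-allocated length instead.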
  __ tst(array_size, array_size);
  // Length of the FixedArray is the number of pre-allocated elements if
  // the actual JSArray has length 0 and the size of the JSArray for non-empty
  // JSArrays. The length of a FixedArray is stored as a smi.
  __ mov(array_size,
         Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)),
         LeaveCC,
         eq);
  ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
  __ str(array_size,
         MemOperand(elements_array_storage, kPointerSize, PostIndex));

  // Calculate elements array and elements array end.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // array_size: smi-tagged size of elements array
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
  __ add(elements_array_end,
         elements_array_storage,
         Operand(array_size, LSL, kPointerSizeLog2 - kSmiTagSize));

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // elements_array_end: start of next object
  if (fill_with_hole) {
    Label loop, entry;
    __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
    __ jmp(&entry);
    __ bind(&loop);
    __ str(scratch1,
           MemOperand(elements_array_storage, kPointerSize, PostIndex));
    __ bind(&entry);
    __ cmp(elements_array_storage, elements_array_end);
    __ b(lt, &loop);
  }
}

// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code, the runtime is called.
// This function assumes the following state:
// r0: argc
// r1: constructor (built-in Array function)
// lr: return address
// sp[0]: last argument
// This function is used for both construct and normal calls of Array. The only
// difference between handling a construct call and a normal call is that for a
// construct call the constructor function in r1 needs to be preserved for
// entering the generic code. In both cases argc in r0 needs to be preserved.
// Both registers are preserved by this code, so there is no need to
// differentiate between a construct call and a normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label* call_generic_code) {
  Counters* counters = masm->isolate()->counters();
  Label argc_one_or_more, argc_two_or_more;

  // Check for array construction with zero arguments or one.
  __ cmp(r0, Operand(0, RelocInfo::NONE));
  __ b(ne, &argc_one_or_more);

  // Handle construction of an empty array.
  AllocateEmptyJSArray(masm,
                       r1,
                       r2,
                       r3,
                       r4,
                       r5,
                       JSArray::kPreallocatedArrayElements,
                       call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1, r3, r4);
  // Set up the return value, remove the receiver from the stack and return.
  __ mov(r0, r2);
  __ add(sp, sp, Operand(kPointerSize));
  __ Jump(lr);

  // Check for one argument. Bail out if the argument is not a smi or if it is
  // negative.
  __ bind(&argc_one_or_more);
  __ cmp(r0, Operand(1));
  __ b(ne, &argc_two_or_more);
  STATIC_ASSERT(kSmiTag == 0);
  __ ldr(r2, MemOperand(sp));  // Get the argument from the stack.
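  // With kSmiTag == 0 the mask below is non-zero exactly when the argument is
  // either not a smi (tag bit set) or negative (sign bit set), so one test
  // covers both bail-out conditions.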
  __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
  __ b(ne, call_generic_code);

  // Handle construction of an empty array of a certain size. Bail out if size
  // is too large to actually allocate an elements array.
  STATIC_ASSERT(kSmiTag == 0);
  __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
  __ b(ge, call_generic_code);

  // r0: argc
  // r1: constructor
  // r2: array_size (smi)
  // sp[0]: argument
  AllocateJSArray(masm,
                  r1,
                  r2,
                  r3,
                  r4,
                  r5,
                  r6,
                  r7,
                  true,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1, r2, r4);
  // Set up the return value, remove the receiver and argument from the stack
  // and return.
  __ mov(r0, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Jump(lr);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ mov(r2, Operand(r0, LSL, kSmiTagSize));  // Convert argc to a smi.

  // r0: argc
  // r1: constructor
  // r2: array_size (smi)
  // sp[0]: last argument
  AllocateJSArray(masm,
                  r1,
                  r2,
                  r3,
                  r4,
                  r5,
                  r6,
                  r7,
                  false,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1, r2, r6);

  // Fill arguments as array elements. Copy from the top of the stack (last
  // element) to the array backing store filling it backwards. Note:
  // elements_array_end points after the backing store; therefore PreIndex is
  // used when filling the backing store.
  // r0: argc
  // r3: JSArray
  // r4: elements_array storage start (untagged)
  // r5: elements_array_end (untagged)
  // sp[0]: last argument
  Label loop, entry;
  __ jmp(&entry);
  __ bind(&loop);
  __ ldr(r2, MemOperand(sp, kPointerSize, PostIndex));
  __ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
  __ bind(&entry);
  __ cmp(r4, r5);
  __ b(lt, &loop);

  // Remove caller arguments and receiver from the stack, set up the return
  // value and return.
  // r0: argc
  // r3: JSArray
  // sp[0]: receiver
  __ add(sp, sp, Operand(kPointerSize));
  __ mov(r0, r3);
  __ Jump(lr);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code if the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);

  Handle<Code> array_code =
      masm->isolate()->builtins()->ArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin and internal
    // Array functions which always have a map.
    // Initial map for the builtin Array function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Handle<Code> generic_construct_stub =
      masm->isolate()->builtins()->JSConstructStubGeneric();
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);

  Register function = r1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
    __ cmp(function, Operand(r2));
    __ Assert(eq, "Unexpected String function");
  }

  // Load the first argument into r0 and get rid of the rest.
  Label no_arguments;
  __ cmp(r0, Operand(0, RelocInfo::NONE));
  __ b(eq, &no_arguments);
  // First argument is at sp[(argc - 1) * 4].
  __ sub(r0, r0, Operand(1));
  __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
  // sp now points to args[0]; drop args[0] and the receiver.
  __ Drop(2);

  Register argument = r2;
  Label not_cached, argument_is_string;
  NumberToStringStub::GenerateLookupNumberStringCache(
      masm,
      r0,        // Input.
      argument,  // Result.
      r3,        // Scratch.
      r4,        // Scratch.
      r5,        // Scratch.
      false,     // Is it a Smi?
      &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- r2     : argument converted to string
  //  -- r1     : constructor function
  //  -- lr     : return address
  // -----------------------------------

  Label gc_required;
  __ AllocateInNewSpace(JSValue::kSize,
                        r0,  // Result.
                        r3,  // Scratch.
                        r4,  // Scratch.
                        &gc_required,
                        TAG_OBJECT);

  // Initialize the String object.
  Register map = r3;
  __ LoadGlobalFunctionInitialMap(function, map, r4);
  if (FLAG_debug_code) {
    __ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(eq, "Unexpected string wrapper instance size");
    __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ cmp(r4, Operand(0, RelocInfo::NONE));
    __ Assert(eq, "Unexpected unused properties of string wrapper");
  }
  __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));

  __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
  __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));

  __ str(argument, FieldMemOperand(r0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(r0, &convert_argument);

  // Is it a String?
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ tst(r3, Operand(kIsNotStringMask));
  __ b(ne, &convert_argument);
  __ mov(argument, r0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  __ b(&argument_is_string);

  // Invoke the conversion builtin and put the result into r2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  __ EnterInternalFrame();
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  __ LeaveInternalFrame();
  __ pop(function);
  __ mov(argument, r0);
  __ b(&argument_is_string);

  // Load the empty string into r2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kEmptyStringRootIndex);
  __ Drop(1);
  __ b(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
  __ EnterInternalFrame();
  __ push(argument);
  __ CallRuntime(Runtime::kNewStringWrapper, 1);
  __ LeaveInternalFrame();
  __ Ret();
}


void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Label non_function_call;
  // Check that the function is not a smi.
  __ JumpIfSmi(r1, &non_function_call);
  // Check that the function is a JSFunction.
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &non_function_call);

  // Jump to the function-specific construct stub.
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kConstructStubOffset));
  __ add(pc, r2, Operand(Code::kHeaderSize - kHeapObjectTag));

  // r0: number of arguments
  // r1: called object
  __ bind(&non_function_call);
  // Set expected number of arguments to zero (not changing r0).
  __ mov(r2, Operand(0, RelocInfo::NONE));
  __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ SetCallKind(r5, CALL_AS_METHOD);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  __ EnterConstructFrame();

  // Preserve the two incoming parameters on the stack.
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
  __ push(r0);  // Smi-tagged arguments count.
  __ push(r1);  // Constructor function.

  // Try to allocate the object without transitioning into C code. If any of
  // the preconditions is not met, the code bails out to the runtime call.
  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(isolate);
    __ mov(r2, Operand(debug_step_in_fp));
    __ ldr(r2, MemOperand(r2));
    __ tst(r2, r2);
    __ b(ne, &rt_call);
#endif

    // Load the initial map and verify that it is in fact a map.
    // r1: constructor function
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ JumpIfSmi(r2, &rt_call);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ b(ne, &rt_call);

    // Check that the constructor is not constructing a JSFunction (see
    // comments in Runtime_NewObject in runtime.cc), in which case the initial
    // map's instance type would be JS_FUNCTION_TYPE.
    // r1: constructor function
    // r2: initial map
    __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
    __ b(eq, &rt_call);

    if (count_constructions) {
      Label allocate;
      // Decrease generous allocation count.
      __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
      MemOperand constructor_count =
          FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
      __ ldrb(r4, constructor_count);
      __ sub(r4, r4, Operand(1), SetCC);
      __ strb(r4, constructor_count);
      __ b(ne, &allocate);

      __ Push(r1, r2);

      __ push(r1);  // constructor
      // The call will replace the stub, so the countdown is only done once.
      __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

      __ pop(r2);
      __ pop(r1);

      __ bind(&allocate);
    }

    // Now allocate the JSObject on the heap.
    // r1: constructor function
    // r2: initial map
    __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
    __ AllocateInNewSpace(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);

    // Allocated the JSObject, now initialize the fields. Map is set to the
    // initial map, and properties and elements are set to the empty fixed
    // array.
    // r1: constructor function
    // r2: initial map
    // r3: object size
    // r4: JSObject (not tagged)
    __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
    __ mov(r5, r4);
    ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
    __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
    ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
    __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
    ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
    __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

    // Fill all the in-object properties with the appropriate filler.
    // r1: constructor function
    // r2: initial map
    // r3: object size (in words)
    // r4: JSObject (not tagged)
    // r5: First in-object property of JSObject (not tagged)
    __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
    ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
    { Label loop, entry;
      if (count_constructions) {
        // To allow for truncation.
        __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex);
      } else {
        __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
      }
      __ b(&entry);
      __ bind(&loop);
      __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
      __ bind(&entry);
      __ cmp(r5, r6);
      __ b(lt, &loop);
    }

    // Add the object tag to make the JSObject real, so that we can continue
    // and jump into the continuation code at any time from now on. Any
    // failures need to undo the allocation, so that the heap is in a
    // consistent state and verifiable.
    __ add(r4, r4, Operand(kHeapObjectTag));

    // Check if a non-empty properties array is needed. Continue with the
    // allocated object if not; fall through to the runtime call if it is.
    // r1: constructor function
    // r4: JSObject
    // r5: start of next object (not tagged)
    __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
    // The instance sizes field contains both the pre-allocated property
    // fields and the in-object properties.
    __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
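    // Ubfx extracts an unsigned 8-bit field: the pre-allocated property field
    // count and the in-object property count each occupy one byte of the
    // packed instance sizes word.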
    __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * 8, 8);
    __ add(r3, r3, Operand(r6));
    __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * 8, 8);
    __ sub(r3, r3, Operand(r6), SetCC);

    // Done if no extra properties are to be allocated.
    __ b(eq, &allocated);
    __ Assert(pl, "Property allocation count failed.");

    // Scale the number of elements by pointer size and add the header for
    // FixedArrays to the start of the next object calculation from above.
    // r1: constructor
    // r3: number of elements in properties array
    // r4: JSObject
    // r5: start of next object
    __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
    __ AllocateInNewSpace(
        r0,
        r5,
        r6,
        r2,
        &undo_allocation,
        static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

    // Initialize the FixedArray.
    // r1: constructor
    // r3: number of elements in properties array
    // r4: JSObject
    // r5: FixedArray (not tagged)
    __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
    __ mov(r2, r5);
    ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
    __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
    ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
    __ mov(r0, Operand(r3, LSL, kSmiTagSize));
    __ str(r0, MemOperand(r2, kPointerSize, PostIndex));

    // Initialize the fields to undefined.
    // r1: constructor function
    // r2: First element of FixedArray (not tagged)
    // r3: number of elements in properties array
    // r4: JSObject
    // r5: FixedArray (not tagged)
    __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
    ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
    { Label loop, entry;
      if (count_constructions) {
        __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
      } else if (FLAG_debug_code) {
        __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
        __ cmp(r7, r8);
        __ Assert(eq, "Undefined value not loaded.");
      }
      __ b(&entry);
      __ bind(&loop);
      __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
      __ bind(&entry);
      __ cmp(r2, r6);
      __ b(lt, &loop);
    }

    // Store the initialized FixedArray into the properties field of
    // the JSObject.
    // r1: constructor function
    // r4: JSObject
    // r5: FixedArray (not tagged)
    __ add(r5, r5, Operand(kHeapObjectTag));  // Add the heap tag.
    __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));

    // Continue with the JSObject being successfully allocated.
    // r1: constructor function
    // r4: JSObject
    __ jmp(&allocated);

    // Undo the setting of the new top so that the heap is verifiable. For
    // example, the map's unused properties potentially do not match the
    // allocated object's unused properties.
    // r4: JSObject (previous new top)
    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(r4, r5);
  }

  // Allocate the new receiver object using the runtime call.
  // r1: constructor function
  __ bind(&rt_call);
  __ push(r1);  // argument for Runtime_NewObject
  __ CallRuntime(Runtime::kNewObject, 1);
  __ mov(r4, r0);

  // Receiver for constructor call allocated.
  // r4: JSObject
  __ bind(&allocated);
  __ push(r4);

  // Push the function and the allocated receiver from the stack.
  // sp[0]: receiver (newly allocated object)
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ ldr(r1, MemOperand(sp, kPointerSize));
  __ push(r1);  // Constructor function.
  __ push(r4);  // Receiver.

  // Reload the number of arguments from the stack.
  // r1: constructor function
  // sp[0]: receiver
  // sp[1]: constructor function
  // sp[2]: receiver
  // sp[3]: constructor function
  // sp[4]: number of arguments (smi-tagged)
  __ ldr(r3, MemOperand(sp, 4 * kPointerSize));

  // Set up pointer to last argument.
  __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

  // Set up number of arguments for function call below.
  __ mov(r0, Operand(r3, LSR, kSmiTagSize));

  // Copy arguments and receiver to the expression stack.
  // r0: number of arguments
  // r2: address of last argument (caller sp)
  // r1: constructor function
  // r3: number of arguments (smi-tagged)
  // sp[0]: receiver
  // sp[1]: constructor function
  // sp[2]: receiver
  // sp[3]: constructor function
  // sp[4]: number of arguments (smi-tagged)
  Label loop, entry;
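  // r3 is the smi-tagged argument count, so the scaled index below shifts by
  // kPointerSizeLog2 - 1 (the smi tag size), and each iteration decrements r3
  // by 2, i.e. by one smi-encoded argument.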
  __ b(&entry);
  __ bind(&loop);
  __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
  __ push(ip);
  __ bind(&entry);
  __ sub(r3, r3, Operand(2), SetCC);
  __ b(ge, &loop);

  // Call the function.
  // r0: number of arguments
  // r1: constructor function
  if (is_api_function) {
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
    Handle<Code> code =
        masm->isolate()->builtins()->HandleApiCallConstruct();
    ParameterCount expected(0);
    __ InvokeCode(code, expected, expected,
                  RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
  } else {
    ParameterCount actual(r0);
    __ InvokeFunction(r1, actual, CALL_FUNCTION,
                      NullCallWrapper(), CALL_AS_METHOD);
  }

  // Pop the function from the stack.
  // sp[0]: constructor function
  // sp[1]: receiver
  // sp[2]: constructor function
  // sp[3]: number of arguments (smi-tagged)
  __ pop();

  // Restore context from the frame.
  // r0: result
  // sp[0]: receiver
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

  // If the result is an object (in the ECMA sense), we should get rid
  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
  // on page 74.
  Label use_receiver, exit;

  // If the result is a smi, it is *not* an object in the ECMA sense.
  // r0: result
  // sp[0]: receiver (newly allocated object)
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ JumpIfSmi(r0, &use_receiver);

  // If the type of the result (stored in its map) is less than
  // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
  __ CompareObjectType(r0, r3, r3, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &exit);

  // Throw away the result of the constructor invocation and use the
  // on-stack receiver as the result.
  __ bind(&use_receiver);
  __ ldr(r0, MemOperand(sp));

  // Remove receiver from the stack, remove caller arguments, and
  // return.
  __ bind(&exit);
  // r0: result
  // sp[0]: receiver (newly allocated object)
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
  __ LeaveConstructFrame();
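  // r1 holds the smi-tagged argument count; the shift by kPointerSizeLog2 - 1
  // converts it to a byte count, and the extra kPointerSize drops the
  // receiver.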
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  __ Jump(lr);
}


void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r7, cp may be clobbered

  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, Operand(0, RelocInfo::NONE));

  // Enter an internal frame.
  __ EnterInternalFrame();

  // Set up the context from the function argument.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  __ InitializeRootRegister();

  // Push the function and the receiver onto the stack.
  __ push(r1);
  __ push(r2);

  // Copy arguments to the stack in a loop.
  // r1: function
  // r3: argc
  // r4: argv, i.e. points to first arg
  Label loop, entry;
  __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
  // r2 points past last arg.
  __ b(&entry);
  __ bind(&loop);
  __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
  __ ldr(r0, MemOperand(r0));  // dereference handle
  __ push(r0);  // push parameter
  __ bind(&entry);
  __ cmp(r4, r2);
  __ b(ne, &loop);

  // Initialize all JavaScript callee-saved registers, since they will be seen
  // by the garbage collector as part of handlers.
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  __ mov(r5, Operand(r4));
  __ mov(r6, Operand(r4));
  __ mov(r7, Operand(r4));
  if (kR9Available == 1) {
    __ mov(r9, Operand(r4));
  }

  // Invoke the code and pass argc as r0.
  __ mov(r0, Operand(r3));
  if (is_construct) {
    __ Call(masm->isolate()->builtins()->JSConstructCall());
  } else {
    ParameterCount actual(r0);
    __ InvokeFunction(r1, actual, CALL_FUNCTION,
                      NullCallWrapper(), CALL_AS_METHOD);
  }

  // Exit the JS frame and remove the parameters (except function), and return.
  // Respect ABI stack constraint.
  __ LeaveInternalFrame();
  __ Jump(lr);

  // r0: result
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Preserve the function.
  __ push(r1);
  // Push call kind information.
  __ push(r5);

  // Push the function on the stack as the argument to the runtime function.
  __ push(r1);
  __ CallRuntime(Runtime::kLazyCompile, 1);
  // Calculate the entry point.
  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

  // Restore call kind information.
  __ pop(r5);
  // Restore saved function.
  __ pop(r1);

  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
  __ Jump(r2);
}


void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Preserve the function.
  __ push(r1);
  // Push call kind information.
  __ push(r5);

  // Push the function on the stack as the argument to the runtime function.
  __ push(r1);
  __ CallRuntime(Runtime::kLazyRecompile, 1);
  // Calculate the entry point.
  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

  // Restore call kind information.
  __ pop(r5);
  // Restore saved function.
  __ pop(r1);

  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
  __ Jump(r2);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  __ EnterInternalFrame();
  // Pass the function and deoptimization type to the runtime system.
  __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
  __ push(r0);
  __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  __ LeaveInternalFrame();

  // Get the full codegen state from the stack and untag it -> r6.
  __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r6);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
  __ b(ne, &with_tos_register);
  __ add(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
  __ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
  __ b(ne, &unknown_state);
  __ add(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  // For now, we are relying on the fact that Runtime::NotifyOSR
  // doesn't do any garbage collection which allows us to save/restore
  // the registers without worrying about which of them contain
  // pointers. This seems a bit fragile.
  __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
  __ EnterInternalFrame();
  __ CallRuntime(Runtime::kNotifyOSR, 0);
  __ LeaveInternalFrame();
  __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
  __ Ret();
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  CpuFeatures::TryForceFeatureScope scope(VFP3);
  if (!CpuFeatures::IsSupported(VFP3)) {
    __ Abort("Unreachable code: Cannot optimize without VFP3 support.");
    return;
  }

  // Look up the function in the JavaScript frame and push it as an
  // argument to the on-stack replacement function.
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ EnterInternalFrame();
  __ push(r0);
  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  __ LeaveInternalFrame();

  // If the result was -1 it means that we couldn't optimize the
  // function. Just return and continue in the unoptimized version.
  Label skip;
  __ cmp(r0, Operand(Smi::FromInt(-1)));
  __ b(ne, &skip);
  __ Ret();

  __ bind(&skip);
  // Untag the AST id and push it on the stack.
  __ SmiUntag(r0);
  __ push(r0);

  // Generate the code for doing the frame-to-frame translation using
  // the deoptimizer infrastructure.
  Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
  generator.Generate();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  { Label done;
    __ tst(r0, Operand(r0));
    __ b(ne, &done);
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ push(r2);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // r0: actual number of arguments
  Label slow, non_function;
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  __ JumpIfSmi(r1, &non_function);
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  // r0: actual number of arguments
  // r1: function
  Label shift_arguments;
  __ mov(r4, Operand(0, RelocInfo::NONE));  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
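    // The compiler hints appear to be stored as a smi, hence the
    // + kSmiTagSize when selecting the flag bits below.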
1248 __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1249 kSmiTagSize)));
1250 __ b(ne, &shift_arguments);
1251
1252 // Do not transform the receiver for native (Compilerhints already in r3).
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001253 __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001254 __ b(ne, &shift_arguments);
1255
1256 // Compute the receiver in non-strict mode.
Steve Blocka7e24c12009-10-30 11:49:00 +00001257 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
1258 __ ldr(r2, MemOperand(r2, -kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00001259 // r0: actual number of arguments
1260 // r1: function
1261 // r2: first argument
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001262 __ JumpIfSmi(r2, &convert_to_object);
Steve Blocka7e24c12009-10-30 11:49:00 +00001263
Steve Blocka7e24c12009-10-30 11:49:00 +00001264 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1265 __ cmp(r2, r3);
1266 __ b(eq, &use_global_receiver);
Ben Murdoch257744e2011-11-30 15:57:28 +00001267 __ LoadRoot(r3, Heap::kNullValueRootIndex);
1268 __ cmp(r2, r3);
1269 __ b(eq, &use_global_receiver);
Steve Blocka7e24c12009-10-30 11:49:00 +00001270
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001271 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1272 __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
Ben Murdoch257744e2011-11-30 15:57:28 +00001273 __ b(ge, &shift_arguments);
Steve Blocka7e24c12009-10-30 11:49:00 +00001274
Andrei Popescu402d9372010-02-26 13:31:12 +00001275 __ bind(&convert_to_object);
Ben Murdoch85b71792012-04-11 18:30:58 +01001276 __ EnterInternalFrame(); // In order to preserve argument count.
1277 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); // Smi-tagged.
1278 __ push(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00001279
Ben Murdoch85b71792012-04-11 18:30:58 +01001280 __ push(r2);
1281 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1282 __ mov(r2, r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00001283
Ben Murdoch85b71792012-04-11 18:30:58 +01001284 __ pop(r0);
1285 __ mov(r0, Operand(r0, ASR, kSmiTagSize));
1286 __ LeaveInternalFrame();
Ben Murdoch589d6972011-11-30 16:04:58 +00001287 // Restore the function to r1, and the flag to r4.
Andrei Popescu402d9372010-02-26 13:31:12 +00001288 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
Ben Murdoch589d6972011-11-30 16:04:58 +00001289 __ mov(r4, Operand(0, RelocInfo::NONE));
Andrei Popescu402d9372010-02-26 13:31:12 +00001290 __ jmp(&patch_receiver);
Steve Blocka7e24c12009-10-30 11:49:00 +00001291
Andrei Popescu402d9372010-02-26 13:31:12 +00001292 // Use the global receiver object from the called function as the
1293 // receiver.
Steve Blocka7e24c12009-10-30 11:49:00 +00001294 __ bind(&use_global_receiver);
1295 const int kGlobalIndex =
1296 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
1297 __ ldr(r2, FieldMemOperand(cp, kGlobalIndex));
Steve Blockd0582a62009-12-15 09:54:21 +00001298 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
1299 __ ldr(r2, FieldMemOperand(r2, kGlobalIndex));
Steve Blocka7e24c12009-10-30 11:49:00 +00001300 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
1301
1302 __ bind(&patch_receiver);
1303 __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
1304 __ str(r2, MemOperand(r3, -kPointerSize));
1305
Andrei Popescu402d9372010-02-26 13:31:12 +00001306 __ jmp(&shift_arguments);
Steve Blocka7e24c12009-10-30 11:49:00 +00001307 }
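  // Roughly, step 3a above implements the non-strict receiver coercion
  // (a JS-level sketch, not generated code):
  //   if (receiver === undefined || receiver === null)
  //     receiver = <global receiver object>;
  //   else if (!IsSpecObject(receiver))
  //     receiver = ToObject(receiver);    // via Builtins::TO_OBJECT
  // Strict mode and native functions branch straight to &shift_arguments
  // and keep the receiver exactly as passed.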
1308
Ben Murdoch589d6972011-11-30 16:04:58 +00001309 // 3b. Check for function proxy.
1310 __ bind(&slow);
1311 __ mov(r4, Operand(1, RelocInfo::NONE)); // indicate function proxy
1312 __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
1313 __ b(eq, &shift_arguments);
1314 __ bind(&non_function);
1315 __ mov(r4, Operand(2, RelocInfo::NONE)); // indicate non-function
1316
1317 // 3c. Patch the first argument when calling a non-function. The
Andrei Popescu402d9372010-02-26 13:31:12 +00001318 // CALL_NON_FUNCTION builtin expects the non-function callee as
1319 // receiver, so overwrite the first argument which will ultimately
1320 // become the receiver.
1321 // r0: actual number of arguments
Steve Blocka7e24c12009-10-30 11:49:00 +00001322 // r1: function
Ben Murdoch589d6972011-11-30 16:04:58 +00001323 // r4: call type (0: JS function, 1: function proxy, 2: non-function)
Andrei Popescu402d9372010-02-26 13:31:12 +00001324 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
1325 __ str(r1, MemOperand(r2, -kPointerSize));
Andrei Popescu402d9372010-02-26 13:31:12 +00001326
1327 // 4. Shift arguments and return address one slot down on the stack
1328 // (overwriting the original receiver). Adjust argument count to make
1329 // the original first argument the new receiver.
1330 // r0: actual number of arguments
1331 // r1: function
Ben Murdoch589d6972011-11-30 16:04:58 +00001332 // r4: call type (0: JS function, 1: function proxy, 2: non-function)
Andrei Popescu402d9372010-02-26 13:31:12 +00001333 __ bind(&shift_arguments);
Steve Blocka7e24c12009-10-30 11:49:00 +00001334 { Label loop;
1335 // Calculate the copy start address (destination). Copy end address is sp.
1336 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
Steve Blocka7e24c12009-10-30 11:49:00 +00001337
1338 __ bind(&loop);
1339 __ ldr(ip, MemOperand(r2, -kPointerSize));
1340 __ str(ip, MemOperand(r2));
1341 __ sub(r2, r2, Operand(kPointerSize));
1342 __ cmp(r2, sp);
1343 __ b(ne, &loop);
Andrei Popescu402d9372010-02-26 13:31:12 +00001344 // Adjust the actual number of arguments and remove the top element
1345 // (which is a copy of the last argument).
1346 __ sub(r0, r0, Operand(1));
1347 __ pop();
Steve Blocka7e24c12009-10-30 11:49:00 +00001348 }
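  // Net effect of the shift (a sketch): for f.call(thisArg, a, b) the
  // stack goes from
  //   sp[0]=b, sp[4]=a, sp[8]=thisArg', sp[12]=f      (r0 == 3)
  // to
  //   sp[0]=b, sp[4]=a, sp[8]=thisArg'                (r0 == 2)
  // where thisArg' is the possibly coerced receiver from step 3 and the
  // callee f is only held in r1 from here on.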
1349
Ben Murdoch589d6972011-11-30 16:04:58 +00001350 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
1351 // or a function proxy via CALL_FUNCTION_PROXY.
Steve Blocka7e24c12009-10-30 11:49:00 +00001352 // r0: actual number of arguments
1353 // r1: function
Ben Murdoch589d6972011-11-30 16:04:58 +00001354 // r4: call type (0: JS function, 1: function proxy, 2: non-function)
1355 { Label function, non_proxy;
1356 __ tst(r4, r4);
1357 __ b(eq, &function);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001358 // Expected number of arguments is 0 for CALL_NON_FUNCTION.
1359 __ mov(r2, Operand(0, RelocInfo::NONE));
Ben Murdoch257744e2011-11-30 15:57:28 +00001360 __ SetCallKind(r5, CALL_AS_METHOD);
Ben Murdoch589d6972011-11-30 16:04:58 +00001361 __ cmp(r4, Operand(1));
1362 __ b(ne, &non_proxy);
1363
1364 __ push(r1); // re-add proxy object as additional argument
1365 __ add(r0, r0, Operand(1));
1366 __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
1367 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1368 RelocInfo::CODE_TARGET);
1369
1370 __ bind(&non_proxy);
1371 __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
Steve Block44f0eee2011-05-26 01:26:41 +01001372 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1373 RelocInfo::CODE_TARGET);
Andrei Popescu402d9372010-02-26 13:31:12 +00001374 __ bind(&function);
Steve Blocka7e24c12009-10-30 11:49:00 +00001375 }
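  // Dispatch on the call type in r4 (a sketch of the control flow):
  //   r4 == 0: plain JS function, fall through to the direct call in 5b;
  //   r4 == 1: function proxy, re-push the proxy as an extra argument
  //            and enter CALL_FUNCTION_PROXY;
  //   r4 == 2: non-function, enter CALL_NON_FUNCTION.
  // Both builtin paths are entered through the arguments adaptor with an
  // expected argument count of 0 in r2.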
Andrei Popescu402d9372010-02-26 13:31:12 +00001376
1377 // 5b. Get the code to call from the function and check that the number of
1378 // expected arguments matches what we're providing. If so, jump
1379   // (tail-call) to the code in register r3 without checking arguments.
1380 // r0: actual number of arguments
1381 // r1: function
1382 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1383 __ ldr(r2,
1384 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001385 __ mov(r2, Operand(r2, ASR, kSmiTagSize));
Steve Block791712a2010-08-27 10:21:07 +01001386 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00001387 __ SetCallKind(r5, CALL_AS_METHOD);
Andrei Popescu402d9372010-02-26 13:31:12 +00001388 __ cmp(r2, r0); // Check formal and actual parameter counts.
Steve Block44f0eee2011-05-26 01:26:41 +01001389 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1390 RelocInfo::CODE_TARGET,
1391 ne);
Andrei Popescu402d9372010-02-26 13:31:12 +00001392
1393 ParameterCount expected(0);
Ben Murdoch257744e2011-11-30 15:57:28 +00001394 __ InvokeCode(r3, expected, expected, JUMP_FUNCTION,
1395 NullCallWrapper(), CALL_AS_METHOD);
Steve Blocka7e24c12009-10-30 11:49:00 +00001396}
1397
1398
1399void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1400 const int kIndexOffset = -5 * kPointerSize;
1401 const int kLimitOffset = -4 * kPointerSize;
1402 const int kArgsOffset = 2 * kPointerSize;
1403 const int kRecvOffset = 3 * kPointerSize;
1404 const int kFunctionOffset = 4 * kPointerSize;
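  // Layout assumed by the offsets above (a sketch; the negative offsets
  // refer to slots pushed below the internal frame entered next):
  //   fp + 4 * kPointerSize : function                (kFunctionOffset)
  //   fp + 3 * kPointerSize : receiver / this arg     (kRecvOffset)
  //   fp + 2 * kPointerSize : arguments array         (kArgsOffset)
  //   fp - 4 * kPointerSize : argument limit, a smi   (kLimitOffset)
  //   fp - 5 * kPointerSize : current index, a smi    (kIndexOffset)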
1405
Ben Murdoch85b71792012-04-11 18:30:58 +01001406 __ EnterInternalFrame();
Steve Blocka7e24c12009-10-30 11:49:00 +00001407
Ben Murdoch85b71792012-04-11 18:30:58 +01001408 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function
1409 __ push(r0);
1410 __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array
1411 __ push(r0);
1412 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
Steve Blocka7e24c12009-10-30 11:49:00 +00001413
Ben Murdoch85b71792012-04-11 18:30:58 +01001414 // Check the stack for overflow. We are not trying to catch
1415 // interruptions (e.g. debug break and preemption) here, so the "real stack
1416 // limit" is checked.
1417 Label okay;
1418 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
1419 // Make r2 the space we have left. The stack might already be overflowed
1420 // here which will cause r2 to become negative.
1421 __ sub(r2, sp, r2);
1422 // Check if the arguments will overflow the stack.
1423 __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1424 __ b(gt, &okay); // Signed comparison.
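  // A note on the check above: r0 holds the argument count as a smi, so
  // shifting it left by (kPointerSizeLog2 - kSmiTagSize) yields the
  // number of bytes the pushed arguments will need; that is compared
  // against the remaining headroom below the real stack limit in r2.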
Steve Blocka7e24c12009-10-30 11:49:00 +00001425
Ben Murdoch85b71792012-04-11 18:30:58 +01001426 // Out of stack space.
1427 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1428 __ push(r1);
1429 __ push(r0);
1430 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
1431 // End of stack check.
Steve Blocka7e24c12009-10-30 11:49:00 +00001432
Ben Murdoch85b71792012-04-11 18:30:58 +01001433 // Push current limit and index.
1434 __ bind(&okay);
1435 __ push(r0); // limit
1436 __ mov(r1, Operand(0, RelocInfo::NONE)); // initial index
1437 __ push(r1);
Steve Blocka7e24c12009-10-30 11:49:00 +00001438
Ben Murdoch85b71792012-04-11 18:30:58 +01001439 // Get the receiver.
1440 __ ldr(r0, MemOperand(fp, kRecvOffset));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001441
Ben Murdoch85b71792012-04-11 18:30:58 +01001442 // Check that the function is a JS function (otherwise it must be a proxy).
1443 Label push_receiver;
1444 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1445 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
1446 __ b(ne, &push_receiver);
Ben Murdoch589d6972011-11-30 16:04:58 +00001447
Ben Murdoch85b71792012-04-11 18:30:58 +01001448 // Change context eagerly to get the right global object if necessary.
1449 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1450 // Load the shared function info while the function is still in r1.
1451 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
Ben Murdoch589d6972011-11-30 16:04:58 +00001452
Ben Murdoch85b71792012-04-11 18:30:58 +01001453 // Compute the receiver.
1454 // Do not transform the receiver for strict mode functions.
1455 Label call_to_object, use_global_receiver;
1456 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
1457 __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1458 kSmiTagSize)));
1459 __ b(ne, &push_receiver);
Ben Murdoch257744e2011-11-30 15:57:28 +00001460
Ben Murdoch85b71792012-04-11 18:30:58 +01001461  // Do not transform the receiver for native functions (CompilerHints already in r2).
1462 __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1463 __ b(ne, &push_receiver);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001464
Ben Murdoch85b71792012-04-11 18:30:58 +01001465 // Compute the receiver in non-strict mode.
1466 __ JumpIfSmi(r0, &call_to_object);
1467 __ LoadRoot(r1, Heap::kNullValueRootIndex);
1468 __ cmp(r0, r1);
1469 __ b(eq, &use_global_receiver);
1470 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
1471 __ cmp(r0, r1);
1472 __ b(eq, &use_global_receiver);
Steve Blocka7e24c12009-10-30 11:49:00 +00001473
Ben Murdoch85b71792012-04-11 18:30:58 +01001474 // Check if the receiver is already a JavaScript object.
1475 // r0: receiver
1476 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1477 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
1478 __ b(ge, &push_receiver);
Steve Blocka7e24c12009-10-30 11:49:00 +00001479
Ben Murdoch85b71792012-04-11 18:30:58 +01001480 // Convert the receiver to a regular object.
1481 // r0: receiver
1482 __ bind(&call_to_object);
1483 __ push(r0);
1484 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1485 __ b(&push_receiver);
Steve Blocka7e24c12009-10-30 11:49:00 +00001486
Ben Murdoch85b71792012-04-11 18:30:58 +01001487 // Use the current global receiver object as the receiver.
1488 __ bind(&use_global_receiver);
1489 const int kGlobalOffset =
1490 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
1491 __ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
1492 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
1493 __ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
1494 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
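  // The loads above walk from the current context to its global object,
  // then to that object's global context, then to the global context's
  // global object, and finally to its global receiver (the object used
  // as 'this' here).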
Steve Blocka7e24c12009-10-30 11:49:00 +00001495
Ben Murdoch85b71792012-04-11 18:30:58 +01001496 // Push the receiver.
1497 // r0: receiver
1498 __ bind(&push_receiver);
1499 __ push(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00001500
Ben Murdoch85b71792012-04-11 18:30:58 +01001501 // Copy all arguments from the array to the stack.
1502 Label entry, loop;
1503 __ ldr(r0, MemOperand(fp, kIndexOffset));
1504 __ b(&entry);
Steve Blocka7e24c12009-10-30 11:49:00 +00001505
Ben Murdoch85b71792012-04-11 18:30:58 +01001506 // Load the current argument from the arguments array and push it to the
1507 // stack.
1508 // r0: current argument index
1509 __ bind(&loop);
1510 __ ldr(r1, MemOperand(fp, kArgsOffset));
1511 __ push(r1);
1512 __ push(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00001513
Ben Murdoch85b71792012-04-11 18:30:58 +01001514 // Call the runtime to access the property in the arguments array.
1515 __ CallRuntime(Runtime::kGetProperty, 2);
1516 __ push(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00001517
Ben Murdoch85b71792012-04-11 18:30:58 +01001518  // Advance the current argument index (kept as a smi in the frame).
1519 __ ldr(r0, MemOperand(fp, kIndexOffset));
1520 __ add(r0, r0, Operand(1 << kSmiTagSize));
1521 __ str(r0, MemOperand(fp, kIndexOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00001522
Ben Murdoch85b71792012-04-11 18:30:58 +01001523 // Test if the copy loop has finished copying all the elements from the
1524 // arguments object.
1525 __ bind(&entry);
1526 __ ldr(r1, MemOperand(fp, kLimitOffset));
1527 __ cmp(r0, r1);
1528 __ b(ne, &loop);
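  // Roughly, the loop above does (a JS-level sketch, not generated code):
  //   while (index !== limit) {
  //     stack.push(%GetProperty(args, index));
  //     index++;
  //   }
  // with both index and limit kept as smis in the internal frame.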
Steve Blocka7e24c12009-10-30 11:49:00 +00001529
Ben Murdoch85b71792012-04-11 18:30:58 +01001530 // Invoke the function.
1531 Label call_proxy;
1532 ParameterCount actual(r0);
1533 __ mov(r0, Operand(r0, ASR, kSmiTagSize));
1534 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1535 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
1536 __ b(ne, &call_proxy);
1537 __ InvokeFunction(r1, actual, CALL_FUNCTION,
1538 NullCallWrapper(), CALL_AS_METHOD);
Steve Blocka7e24c12009-10-30 11:49:00 +00001539
Ben Murdoch85b71792012-04-11 18:30:58 +01001540 // Tear down the internal frame and remove function, receiver and args.
1541 __ LeaveInternalFrame();
1542 __ add(sp, sp, Operand(3 * kPointerSize));
1543 __ Jump(lr);
Ben Murdoch589d6972011-11-30 16:04:58 +00001544
Ben Murdoch85b71792012-04-11 18:30:58 +01001545 // Invoke the function proxy.
1546 __ bind(&call_proxy);
1547 __ push(r1); // add function proxy as last argument
1548 __ add(r0, r0, Operand(1));
1549 __ mov(r2, Operand(0, RelocInfo::NONE));
1550 __ SetCallKind(r5, CALL_AS_METHOD);
1551 __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
1552 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1553 RelocInfo::CODE_TARGET);
Ben Murdoch589d6972011-11-30 16:04:58 +00001554
Ben Murdoch85b71792012-04-11 18:30:58 +01001555 __ LeaveInternalFrame();
Ben Murdoch589d6972011-11-30 16:04:58 +00001556 __ add(sp, sp, Operand(3 * kPointerSize));
1557 __ Jump(lr);
Steve Blocka7e24c12009-10-30 11:49:00 +00001558}
1559
1560
1561static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1562 __ mov(r0, Operand(r0, LSL, kSmiTagSize));
1563 __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1564 __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
1565 __ add(fp, sp, Operand(3 * kPointerSize));
1566}
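// Sketch of the adaptor frame built by EnterArgumentsAdaptorFrame above
// (lowest address first):
//   sp + 0 * kPointerSize : actual argument count as a smi (from r0)
//   sp + 1 * kPointerSize : function (r1)
//   sp + 2 * kPointerSize : ARGUMENTS_ADAPTOR frame marker (r4)
//   sp + 3 * kPointerSize : caller's fp   <-- the new fp points here
//   sp + 4 * kPointerSize : return address (lr)
// LeaveArgumentsAdaptorFrame below reloads the count from
// fp - 3 * kPointerSize when it tears the frame down.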
1567
1568
1569static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1570 // ----------- S t a t e -------------
1571 // -- r0 : result being passed through
1572 // -----------------------------------
1573 // Get the number of arguments passed (as a smi), tear down the frame and
1574 // then tear down the parameters.
1575   // then drop the parameters from the stack.
1576 __ mov(sp, fp);
1577 __ ldm(ia_w, sp, fp.bit() | lr.bit());
1578 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
1579 __ add(sp, sp, Operand(kPointerSize)); // adjust for receiver
1580}
1581
1582
1583void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1584 // ----------- S t a t e -------------
1585 // -- r0 : actual number of arguments
1586 // -- r1 : function (passed through to callee)
1587 // -- r2 : expected number of arguments
1588 // -- r3 : code entry to call
Ben Murdoch257744e2011-11-30 15:57:28 +00001589 // -- r5 : call kind information
Steve Blocka7e24c12009-10-30 11:49:00 +00001590 // -----------------------------------
1591
1592 Label invoke, dont_adapt_arguments;
1593
1594 Label enough, too_few;
Steve Block6ded16b2010-05-10 14:33:55 +01001595 __ cmp(r0, r2);
Steve Blocka7e24c12009-10-30 11:49:00 +00001596 __ b(lt, &too_few);
1597 __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1598 __ b(eq, &dont_adapt_arguments);
1599
1600 { // Enough parameters: actual >= expected
1601 __ bind(&enough);
1602 EnterArgumentsAdaptorFrame(masm);
1603
1604 // Calculate copy start address into r0 and copy end address into r2.
1605 // r0: actual number of arguments as a smi
1606 // r1: function
1607 // r2: expected number of arguments
1608 // r3: code entry to call
1609 __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1610 // adjust for return address and receiver
1611 __ add(r0, r0, Operand(2 * kPointerSize));
1612 __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));
1613
1614 // Copy the arguments (including the receiver) to the new stack frame.
1615 // r0: copy start address
1616 // r1: function
1617 // r2: copy end address
1618 // r3: code entry to call
1619
1620 Label copy;
1621 __ bind(&copy);
1622 __ ldr(ip, MemOperand(r0, 0));
1623 __ push(ip);
1624 __ cmp(r0, r2); // Compare before moving to next argument.
1625 __ sub(r0, r0, Operand(kPointerSize));
1626 __ b(ne, &copy);
1627
1628 __ b(&invoke);
1629 }
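  // The copy above starts at the receiver slot (the highest address, in
  // r0) and pushes words down to the end address in r2, so the receiver
  // and the expected number of arguments land in the new frame in their
  // original order; any surplus actual arguments are not copied.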
1630
1631 { // Too few parameters: Actual < expected
1632 __ bind(&too_few);
1633 EnterArgumentsAdaptorFrame(masm);
1634
1635 // Calculate copy start address into r0 and copy end address is fp.
1636 // r0: actual number of arguments as a smi
1637 // r1: function
1638 // r2: expected number of arguments
1639 // r3: code entry to call
1640 __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1641
1642 // Copy the arguments (including the receiver) to the new stack frame.
1643 // r0: copy start address
1644 // r1: function
1645 // r2: expected number of arguments
1646 // r3: code entry to call
1647 Label copy;
1648 __ bind(&copy);
1649 // Adjust load for return address and receiver.
1650 __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
1651 __ push(ip);
1652 __ cmp(r0, fp); // Compare before moving to next argument.
1653 __ sub(r0, r0, Operand(kPointerSize));
1654 __ b(ne, &copy);
1655
1656 // Fill the remaining expected arguments with undefined.
1657 // r1: function
1658 // r2: expected number of arguments
1659 // r3: code entry to call
1660 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1661 __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
1662 __ sub(r2, r2, Operand(4 * kPointerSize)); // Adjust for frame.
1663
1664 Label fill;
1665 __ bind(&fill);
1666 __ push(ip);
1667 __ cmp(sp, r2);
1668 __ b(ne, &fill);
1669 }
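  // After the copy and fill loops the new frame holds the receiver, all
  // actual arguments, and one undefined for each missing argument, so
  // the callee's declared parameters are all present on the stack.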
1670
1671 // Call the entry point.
1672 __ bind(&invoke);
1673 __ Call(r3);
1674
1675 // Exit frame and return.
1676 LeaveArgumentsAdaptorFrame(masm);
1677 __ Jump(lr);
1678
1679
1680 // -------------------------------------------
1681   // Don't adapt arguments.
1682 // -------------------------------------------
1683 __ bind(&dont_adapt_arguments);
1684 __ Jump(r3);
1685}
1686
1687
1688#undef __
1689
1690} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01001691
1692#endif // V8_TARGET_ARCH_ARM