blob: 1f776562f27b11d05cc25a4f943e503ae10a5ca7 [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2006-2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
Leon Clarkef7060e22010-06-03 12:02:55 +010030#if defined(V8_TARGET_ARCH_ARM)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "codegen-inl.h"
33#include "debug.h"
34#include "runtime.h"
35
36namespace v8 {
37namespace internal {
38
39
40#define __ ACCESS_MASM(masm)
41
42
// Generates the adaptor that tail-calls the C++ builtin identified by |id|.
// Optionally pushes the called function (r1) as an extra trailing argument
// before transferring control via JumpToExternalReference.
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument (argc == r0)
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(r1);  // The called function is passed as an implicit argument.
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(r0, r0, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id));
}
71
72
// Load the built-in Array function from the current context into |result|.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the global context.

  __ ldr(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kGlobalContextOffset));
  // Load the Array function from the global context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}
85
86
// This constant has the same value as JSArray::kPreallocatedArrayElements and
// if JSArray::kPreallocatedArrayElements is changed handling of loop unfolding
// below should be reconsidered.
// AllocateEmptyJSArray unrolls its hole-filling loop up to this many stores.
static const int kLoopUnfoldLimit = 4;
91
92
// Allocate an empty JSArray. The allocated array is put into the result
// register. An elements backing store is allocated with size initial_capacity
// and filled with the hole values.
// Bails out to |gc_required| if new space is exhausted.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 int initial_capacity,
                                 Label* gc_required) {
  ASSERT(initial_capacity > 0);
  // Load the initial map from the array function.
  __ ldr(scratch1, FieldMemOperand(array_function,
                                   JSFunction::kPrototypeOrInitialMapOffset));

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.
  int size = JSArray::kSize + FixedArray::SizeFor(initial_capacity);
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ str(scratch1, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
  __ str(scratch1, FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ mov(scratch3, Operand(0));
  __ str(scratch3, FieldMemOperand(result, JSArray::kLengthOffset));

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // scratch2: start of next object
  __ add(scratch1, result, Operand(JSArray::kSize));
  __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  __ and_(scratch1, scratch1, Operand(~kHeapObjectTagMask));

  // Initialize the FixedArray and fill it with holes. FixedArray length is not
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array (untagged)
  // scratch2: start of next object
  __ LoadRoot(scratch3, Heap::kFixedArrayMapRootIndex);
  ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
  // Post-indexed stores walk scratch1 forward through the header fields.
  __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
  __ mov(scratch3, Operand(initial_capacity));
  ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
  __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));

  // Fill the FixedArray with the hole value.
  ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
  ASSERT(initial_capacity <= kLoopUnfoldLimit);
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  // The fill loop is fully unrolled (initial_capacity <= kLoopUnfoldLimit).
  for (int i = 0; i < initial_capacity; i++) {
    __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
  }
}
161
// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register and beginning and end of the FixedArray elements
// storage is put into registers elements_array_storage and elements_array_end
// (see below for when that is not the case). If the parameter fill_with_holes
// is true the allocated elements backing store is filled with the hole values
// otherwise it is left uninitialized. When the backing store is filled the
// register elements_array_storage is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi.
                            Register result,
                            Register elements_array_storage,
                            Register elements_array_end,
                            Register scratch1,
                            Register scratch2,
                            bool fill_with_hole,
                            Label* gc_required) {
  Label not_empty, allocated;

  // Load the initial map from the array function.
  __ ldr(elements_array_storage,
         FieldMemOperand(array_function,
                         JSFunction::kPrototypeOrInitialMapOffset));

  // Check whether an empty sized array is requested.
  __ tst(array_size, array_size);
  __ b(nz, &not_empty);

  // If an empty array is requested allocate a small elements array anyway. This
  // keeps the code below free of special casing for the empty array.
  int size = JSArray::kSize +
             FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
  __ AllocateInNewSpace(size,
                        result,
                        elements_array_end,
                        scratch1,
                        gc_required,
                        TAG_OBJECT);
  __ jmp(&allocated);

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested number of elements.
  __ bind(&not_empty);
  ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  // Size in words: fixed header plus one word per (untagged) array_size.
  __ mov(elements_array_end,
         Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
  __ add(elements_array_end,
         elements_array_end,
         Operand(array_size, ASR, kSmiTagSize));
  __ AllocateInNewSpace(
      elements_array_end,
      result,
      scratch1,
      scratch2,
      gc_required,
      static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array_storage: initial map
  // array_size: size of array (smi)
  __ bind(&allocated);
  __ str(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
  __ str(elements_array_storage,
         FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ str(array_size, FieldMemOperand(result, JSArray::kLengthOffset));

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // array_size: size of array (smi)
  __ add(elements_array_storage, result, Operand(JSArray::kSize));
  __ str(elements_array_storage,
         FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  __ and_(elements_array_storage,
          elements_array_storage,
          Operand(~kHeapObjectTagMask));
  // Initialize the fixed array and fill it with holes. FixedArray length is not
  // stored as a smi.
  // result: JSObject
  // elements_array_storage: elements array (untagged)
  // array_size: size of array (smi)
  ASSERT(kSmiTag == 0);
  __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
  ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
  __ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex));
  // Convert array_size from smi to value.
  __ mov(array_size,
         Operand(array_size, ASR, kSmiTagSize));
  __ tst(array_size, array_size);
  // Length of the FixedArray is the number of pre-allocated elements if
  // the actual JSArray has length 0 and the size of the JSArray for non-empty
  // JSArrays. The length of a FixedArray is not stored as a smi.
  __ mov(array_size, Operand(JSArray::kPreallocatedArrayElements), LeaveCC, eq);
  ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
  __ str(array_size,
         MemOperand(elements_array_storage, kPointerSize, PostIndex));

  // Calculate elements array and elements array end.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // array_size: size of elements array
  __ add(elements_array_end,
         elements_array_storage,
         Operand(array_size, LSL, kPointerSizeLog2));

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // elements_array_end: start of next object
  if (fill_with_hole) {
    Label loop, entry;
    __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
    __ jmp(&entry);
    __ bind(&loop);
    __ str(scratch1,
           MemOperand(elements_array_storage, kPointerSize, PostIndex));
    __ bind(&entry);
    __ cmp(elements_array_storage, elements_array_end);
    __ b(lt, &loop);
  }
}
291
// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code the runtime is called. This
// function assumes the following state:
//   r0: argc
//   r1: constructor (built-in Array function)
//   lr: return address
//   sp[0]: last argument
// This function is used for both construct and normal calls of Array. The only
// difference between handling a construct call and a normal call is that for a
// construct call the constructor function in r1 needs to be preserved for
// entering the generic code. In both cases argc in r0 needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// construct call and normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label* call_generic_code) {
  Label argc_one_or_more, argc_two_or_more;

  // Check for array construction with zero arguments or one.
  __ cmp(r0, Operand(0));
  __ b(ne, &argc_one_or_more);

  // Handle construction of an empty array.
  AllocateEmptyJSArray(masm,
                       r1,
                       r2,
                       r3,
                       r4,
                       r5,
                       JSArray::kPreallocatedArrayElements,
                       call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1, r3, r4);
  // Setup return value, remove receiver from stack and return.
  __ mov(r0, r2);
  __ add(sp, sp, Operand(kPointerSize));
  __ Jump(lr);

  // Check for one argument. Bail out if argument is not smi or if it is
  // negative.
  __ bind(&argc_one_or_more);
  __ cmp(r0, Operand(1));
  __ b(ne, &argc_two_or_more);
  ASSERT(kSmiTag == 0);
  __ ldr(r2, MemOperand(sp));  // Get the argument from the stack.
  // A set sign bit or a set smi-tag bit means: not a non-negative smi.
  __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
  __ b(ne, call_generic_code);

  // Handle construction of an empty array of a certain size. Bail out if size
  // is too large to actually allocate an elements array.
  ASSERT(kSmiTag == 0);
  __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
  __ b(ge, call_generic_code);

  // r0: argc
  // r1: constructor
  // r2: array_size (smi)
  // sp[0]: argument
  AllocateJSArray(masm,
                  r1,
                  r2,
                  r3,
                  r4,
                  r5,
                  r6,
                  r7,
                  true,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1, r2, r4);
  // Setup return value, remove receiver and argument from stack and return.
  __ mov(r0, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Jump(lr);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ mov(r2, Operand(r0, LSL, kSmiTagSize));  // Convert argc to a smi.

  // r0: argc
  // r1: constructor
  // r2: array_size (smi)
  // sp[0]: last argument
  AllocateJSArray(masm,
                  r1,
                  r2,
                  r3,
                  r4,
                  r5,
                  r6,
                  r7,
                  false,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1, r2, r6);

  // Fill arguments as array elements. Copy from the top of the stack (last
  // element) to the array backing store filling it backwards. Note:
  // elements_array_end points after the backing store therefore PreIndex is
  // used when filling the backing store.
  // r0: argc
  // r3: JSArray
  // r4: elements_array storage start (untagged)
  // r5: elements_array_end (untagged)
  // sp[0]: last argument
  Label loop, entry;
  __ jmp(&entry);
  __ bind(&loop);
  __ ldr(r2, MemOperand(sp, kPointerSize, PostIndex));
  __ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
  __ bind(&entry);
  __ cmp(r4, r5);
  __ b(lt, &loop);

  // Remove caller arguments and receiver from the stack, setup return value and
  // return.
  // r0: argc
  // r3: JSArray
  // sp[0]: receiver
  __ add(sp, sp, Operand(kPointerSize));
  __ mov(r0, r3);
  __ Jump(lr);
}
412
413
// Entry point for Array called as a normal function. Tries the fast native
// path (ArrayNativeCode) and falls back to the generic ArrayCodeGeneric
// builtin when the fast path bails out.
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code if the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);
  Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
  Handle<Code> array_code(code);
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}
444
445
// Entry point for Array called as a constructor. Tries the fast native path
// (ArrayNativeCode) and falls back to the generic JSConstructStubGeneric
// builtin when the fast path bails out.
void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin Array function which
    // always have a map.
    GenerateLoadArrayFunction(masm, r2);
    __ cmp(r1, r2);
    __ Assert(eq, "Unexpected Array function");
    // Initial map for the builtin Array function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
  Handle<Code> generic_construct_stub(code);
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
479
480
// Dispatches a [[Construct]] call: for a real JSFunction jumps to its
// function-specific construct stub; for anything else routes through
// CALL_NON_FUNCTION_AS_CONSTRUCTOR via the arguments adaptor.
void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Label non_function_call;
  // Check that the function is not a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &non_function_call);
  // Check that the function is a JSFunction.
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &non_function_call);

  // Jump to the function-specific construct stub.
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kConstructStubOffset));
  // Branch by writing directly to pc, skipping the Code object header.
  __ add(pc, r2, Operand(Code::kHeaderSize - kHeapObjectTag));

  // r0: number of arguments
  // r1: called object
  __ bind(&non_function_call);
  // CALL_NON_FUNCTION expects the non-function constructor as receiver
  // (instead of the original receiver from the call site). The receiver is
  // stack element argc.
  __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  // Set expected number of arguments to zero (not changing r0).
  __ mov(r2, Operand(0));
  __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
          RelocInfo::CODE_TARGET);
}
515
516
// Shared body of the generic and API construct stubs. Allocates the receiver
// (inline when FLAG_inline_new allows, otherwise via Runtime::kNewObject),
// copies the caller's arguments onto the expression stack, invokes the
// constructor, and applies the ECMA-262 13.2.2 rule for choosing between the
// constructor's return value and the allocated receiver.
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function) {
  // Enter a construct frame.
  __ EnterConstructFrame();

  // Preserve the two incoming parameters on the stack.
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
  __ push(r0);  // Smi-tagged arguments count.
  __ push(r1);  // Constructor function.

  // Use r7 for holding undefined which is used in several places below.
  __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);

  // Try to allocate the object without transitioning into C code. If any of the
  // preconditions is not met, the code bails out to the runtime call.
  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
    // While the debugger is stepping-in, always go through the runtime so the
    // debugger can intercept the constructor call.
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address();
    __ mov(r2, Operand(debug_step_in_fp));
    __ ldr(r2, MemOperand(r2));
    __ tst(r2, r2);
    __ b(nz, &rt_call);
#endif

    // Load the initial map and verify that it is in fact a map.
    // r1: constructor function
    // r7: undefined
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ b(eq, &rt_call);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ b(ne, &rt_call);

    // Check that the constructor is not constructing a JSFunction (see comments
    // in Runtime_NewObject in runtime.cc). In which case the initial map's
    // instance type would be JS_FUNCTION_TYPE.
    // r1: constructor function
    // r2: initial map
    // r7: undefined
    __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
    __ b(eq, &rt_call);

    // Now allocate the JSObject on the heap.
    // r1: constructor function
    // r2: initial map
    // r7: undefined
    __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
    __ AllocateInNewSpace(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);

    // Allocated the JSObject, now initialize the fields. Map is set to initial
    // map and properties and elements are set to empty fixed array.
    // r1: constructor function
    // r2: initial map
    // r3: object size
    // r4: JSObject (not tagged)
    // r7: undefined
    __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
    __ mov(r5, r4);
    ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
    __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
    ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
    __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
    ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
    __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

    // Fill all the in-object properties with undefined.
    // r1: constructor function
    // r2: initial map
    // r3: object size (in words)
    // r4: JSObject (not tagged)
    // r5: First in-object property of JSObject (not tagged)
    // r7: undefined
    __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
    ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
    { Label loop, entry;
      __ b(&entry);
      __ bind(&loop);
      __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
      __ bind(&entry);
      __ cmp(r5, r6);
      __ b(lt, &loop);
    }

    // Add the object tag to make the JSObject real, so that we can continue and
    // jump into the continuation code at any time from now on. Any failures
    // need to undo the allocation, so that the heap is in a consistent state
    // and verifiable.
    __ add(r4, r4, Operand(kHeapObjectTag));

    // Check if a non-empty properties array is needed. Continue with allocated
    // object if not fall through to runtime call if it is.
    // r1: constructor function
    // r4: JSObject
    // r5: start of next object (not tagged)
    // r7: undefined
    __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
    // The field instance sizes contains both pre-allocated property fields and
    // in-object properties.
    __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
    __ and_(r6,
            r0,
            Operand(0x000000FF << Map::kPreAllocatedPropertyFieldsByte * 8));
    __ add(r3, r3, Operand(r6, LSR, Map::kPreAllocatedPropertyFieldsByte * 8));
    __ and_(r6, r0, Operand(0x000000FF << Map::kInObjectPropertiesByte * 8));
    __ sub(r3, r3, Operand(r6, LSR, Map::kInObjectPropertiesByte * 8), SetCC);

    // Done if no extra properties are to be allocated.
    __ b(eq, &allocated);
    __ Assert(pl, "Property allocation count failed.");

    // Scale the number of elements by pointer size and add the header for
    // FixedArrays to the start of the next object calculation from above.
    // r1: constructor
    // r3: number of elements in properties array
    // r4: JSObject
    // r5: start of next object
    // r7: undefined
    __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
    __ AllocateInNewSpace(
        r0,
        r5,
        r6,
        r2,
        &undo_allocation,
        static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

    // Initialize the FixedArray.
    // r1: constructor
    // r3: number of elements in properties array
    // r4: JSObject
    // r5: FixedArray (not tagged)
    // r7: undefined
    __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
    __ mov(r2, r5);
    ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
    __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
    ASSERT_EQ(1 * kPointerSize, Array::kLengthOffset);
    __ str(r3, MemOperand(r2, kPointerSize, PostIndex));

    // Initialize the fields to undefined.
    // r1: constructor function
    // r2: First element of FixedArray (not tagged)
    // r3: number of elements in properties array
    // r4: JSObject
    // r5: FixedArray (not tagged)
    // r7: undefined
    __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
    ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
    { Label loop, entry;
      __ b(&entry);
      __ bind(&loop);
      __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
      __ bind(&entry);
      __ cmp(r2, r6);
      __ b(lt, &loop);
    }

    // Store the initialized FixedArray into the properties field of
    // the JSObject
    // r1: constructor function
    // r4: JSObject
    // r5: FixedArray (not tagged)
    __ add(r5, r5, Operand(kHeapObjectTag));  // Add the heap tag.
    __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));

    // Continue with JSObject being successfully allocated
    // r1: constructor function
    // r4: JSObject
    __ jmp(&allocated);

    // Undo the setting of the new top so that the heap is verifiable. For
    // example, the map's unused properties potentially do not match the
    // allocated objects unused properties.
    // r4: JSObject (previous new top)
    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(r4, r5);
  }

  // Allocate the new receiver object using the runtime call.
  // r1: constructor function
  __ bind(&rt_call);
  __ push(r1);  // argument for Runtime_NewObject
  __ CallRuntime(Runtime::kNewObject, 1);
  __ mov(r4, r0);

  // Receiver for constructor call allocated.
  // r4: JSObject
  __ bind(&allocated);
  __ push(r4);

  // Push the function and the allocated receiver from the stack.
  // sp[0]: receiver (newly allocated object)
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ ldr(r1, MemOperand(sp, kPointerSize));
  __ push(r1);  // Constructor function.
  __ push(r4);  // Receiver.

  // Reload the number of arguments from the stack.
  // r1: constructor function
  // sp[0]: receiver
  // sp[1]: constructor function
  // sp[2]: receiver
  // sp[3]: constructor function
  // sp[4]: number of arguments (smi-tagged)
  __ ldr(r3, MemOperand(sp, 4 * kPointerSize));

  // Setup pointer to last argument.
  __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

  // Setup number of arguments for function call below
  __ mov(r0, Operand(r3, LSR, kSmiTagSize));

  // Copy arguments and receiver to the expression stack.
  // r0: number of arguments
  // r2: address of last argument (caller sp)
  // r1: constructor function
  // r3: number of arguments (smi-tagged)
  // sp[0]: receiver
  // sp[1]: constructor function
  // sp[2]: receiver
  // sp[3]: constructor function
  // sp[4]: number of arguments (smi-tagged)
  Label loop, entry;
  __ b(&entry);
  __ bind(&loop);
  // r3 is a smi, so scaling by kPointerSizeLog2 - 1 indexes by argument.
  __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
  __ push(ip);
  __ bind(&entry);
  // Subtracting 2 decrements the smi-tagged count by one argument.
  __ sub(r3, r3, Operand(2), SetCC);
  __ b(ge, &loop);

  // Call the function.
  // r0: number of arguments
  // r1: constructor function
  if (is_api_function) {
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
    Handle<Code> code = Handle<Code>(
        Builtins::builtin(Builtins::HandleApiCallConstruct));
    ParameterCount expected(0);
    __ InvokeCode(code, expected, expected,
                  RelocInfo::CODE_TARGET, CALL_FUNCTION);
  } else {
    ParameterCount actual(r0);
    __ InvokeFunction(r1, actual, CALL_FUNCTION);
  }

  // Pop the function from the stack.
  // sp[0]: constructor function
  // sp[2]: receiver
  // sp[3]: constructor function
  // sp[4]: number of arguments (smi-tagged)
  __ pop();

  // Restore context from the frame.
  // r0: result
  // sp[0]: receiver
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

  // If the result is an object (in the ECMA sense), we should get rid
  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
  // on page 74.
  Label use_receiver, exit;

  // If the result is a smi, it is *not* an object in the ECMA sense.
  // r0: result
  // sp[0]: receiver (newly allocated object)
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &use_receiver);

  // If the type of the result (stored in its map) is less than
  // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
  __ CompareObjectType(r0, r3, r3, FIRST_JS_OBJECT_TYPE);
  __ b(ge, &exit);

  // Throw away the result of the constructor invocation and use the
  // on-stack receiver as the result.
  __ bind(&use_receiver);
  __ ldr(r0, MemOperand(sp));

  // Remove receiver from the stack, remove caller arguments, and
  // return.
  __ bind(&exit);
  // r0: result
  // sp[0]: receiver (newly allocated object)
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
  __ LeaveConstructFrame();
  // r1 holds the smi-tagged argc; shifting by kPointerSizeLog2 - 1 converts
  // it to a byte count of caller arguments.
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2);
  __ Jump(lr);
}
818
819
// Construct stub for ordinary (non-API) JS functions: delegates to the
// shared helper with is_api_function == false.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false);
}
823
824
// Construct stub for API (native callback) functions: delegates to the
// shared helper with is_api_function == true, which routes the call
// through the HandleApiCallConstruct builtin.
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true);
}
828
829
// Shared body of the JS entry trampolines: copies the C-side argument
// vector onto the JS stack, sanitizes callee-saved registers for the GC,
// and invokes the function either as a normal call or as a construct call.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv (array of handles, not raw values)
  // r5-r7, cp may be clobbered

  // Clear the context before we push it when entering the JS frame, so the
  // frame never contains a stale (garbage) context value.
  __ mov(cp, Operand(0));

  // Enter an internal frame.
  __ EnterInternalFrame();

  // Set up the context from the function argument.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Set up the roots register (r10 by convention on this port).
  ExternalReference roots_address = ExternalReference::roots_address();
  __ mov(r10, Operand(roots_address));

  // Push the function and the receiver onto the stack.
  __ push(r1);
  __ push(r2);

  // Copy arguments to the stack in a loop.
  // r1: function
  // r3: argc
  // r4: argv, i.e. points to first arg
  Label loop, entry;
  __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
  // r2 points past last arg.
  __ b(&entry);  // Enter at the test so argc == 0 pushes nothing.
  __ bind(&loop);
  __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
  __ ldr(r0, MemOperand(r0));  // dereference handle
  __ push(r0);  // push parameter
  __ bind(&entry);
  __ cmp(r4, r2);
  __ b(ne, &loop);

  // Initialize all JavaScript callee-saved registers, since they will be seen
  // by the garbage collector as part of handlers.  Filling them with
  // undefined keeps the GC from treating leftover C values as pointers.
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  __ mov(r5, Operand(r4));
  __ mov(r6, Operand(r4));
  __ mov(r7, Operand(r4));
  if (kR9Available == 1) {
    // r9 is only a JS register when the embedding ABI leaves it free.
    __ mov(r9, Operand(r4));
  }

  // Invoke the code and pass argc as r0.
  __ mov(r0, Operand(r3));
  if (is_construct) {
    __ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
            RelocInfo::CODE_TARGET);
  } else {
    ParameterCount actual(r0);
    __ InvokeFunction(r1, actual, CALL_FUNCTION);
  }

  // Exit the JS frame and remove the parameters (except function), and return.
  // Respect ABI stack constraint.
  __ LeaveInternalFrame();
  __ Jump(lr);

  // r0: result
}
900
901
// Entry trampoline for ordinary calls from C++ into JavaScript.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
905
906
// Entry trampoline for construct ('new') calls from C++ into JavaScript.
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
910
911
// Implements Function.prototype.call: fixes up the receiver, handles
// non-function callees via CALL_NON_FUNCTION, shifts the arguments down
// one slot, and tail-calls the target (through the arguments adaptor if
// the formal and actual counts differ).
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument, pushing undefined if the
  //    call had none (the receiver slot must always exist).
  // r0: actual number of arguments
  { Label done;
    __ tst(r0, Operand(r0));
    __ b(ne, &done);
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ push(r2);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // r0: actual number of arguments
  Label non_function;
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &non_function);  // Smis are not callable.
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &non_function);

  // 3a. Patch the first argument if necessary when calling a function.
  // r0: actual number of arguments
  // r1: function
  Label shift_arguments;
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    // Load the first argument (the would-be receiver) into r2.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ ldr(r2, MemOperand(r2, -kPointerSize));
    // r0: actual number of arguments
    // r1: function
    // r2: first argument
    __ tst(r2, Operand(kSmiTagMask));
    __ b(eq, &convert_to_object);  // Smi receiver must be boxed.

    // null and undefined receivers get the function's global receiver.
    __ LoadRoot(r3, Heap::kNullValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);

    // A receiver already in [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE]
    // needs no patching at all.
    __ CompareObjectType(r2, r3, r3, FIRST_JS_OBJECT_TYPE);
    __ b(lt, &convert_to_object);
    __ cmp(r3, Operand(LAST_JS_OBJECT_TYPE));
    __ b(le, &shift_arguments);

    __ bind(&convert_to_object);
    __ EnterInternalFrame();  // In order to preserve argument count.
    __ mov(r0, Operand(r0, LSL, kSmiTagSize));  // Smi-tagged.
    __ push(r0);

    __ push(r2);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
    __ mov(r2, r0);  // Converted receiver.

    __ pop(r0);
    __ mov(r0, Operand(r0, ASR, kSmiTagSize));  // Un-smi the argument count.
    __ LeaveInternalFrame();
    // Restore the function to r1 (the builtin call may have clobbered it).
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ ldr(r2, FieldMemOperand(cp, kGlobalIndex));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
    __ ldr(r2, FieldMemOperand(r2, kGlobalIndex));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));

    // Write the chosen receiver back into the first argument slot.
    __ bind(&patch_receiver);
    __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(r3, -kPointerSize));

    __ jmp(&shift_arguments);
  }

  // 3b. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // r0: actual number of arguments
  // r1: function
  __ bind(&non_function);
  __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ str(r1, MemOperand(r2, -kPointerSize));
  // Clear r1 to indicate a non-function being called.
  __ mov(r1, Operand(0));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  // r0: actual number of arguments
  // r1: function
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));

    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, -kPointerSize));
    __ str(ip, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
    __ cmp(r2, sp);
    __ b(ne, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin.
  // r0: actual number of arguments
  // r1: function (0 indicates non-function, see 3b)
  { Label function;
    __ tst(r1, r1);
    __ b(ne, &function);
    __ mov(r2, Operand(0));  // expected arguments is 0 for CALL_NON_FUNCTION
    __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register r3 without checking arguments.
  // r0: actual number of arguments
  // r1: function
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset));
  __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ cmp(r2, r0);  // Check formal and actual parameter counts.
  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
          RelocInfo::CODE_TARGET, ne);  // Mismatch: adapt arguments.

  // Counts match: invoke the code entry directly.
  ParameterCount expected(0);
  __ InvokeCode(r3, expected, expected, JUMP_FUNCTION);
}
1059
1060
// Implements Function.prototype.apply: checks stack space for the spread
// arguments, computes the receiver, copies each element of the arguments
// array onto the stack via Runtime::kGetProperty, and invokes the function.
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  // Frame-relative slot offsets used throughout (index/limit are pushed
  // below inside the internal frame; args/recv/function are the caller's).
  const int kIndexOffset = -5 * kPointerSize;
  const int kLimitOffset = -4 * kPointerSize;
  const int kArgsOffset = 2 * kPointerSize;
  const int kRecvOffset = 3 * kPointerSize;
  const int kFunctionOffset = 4 * kPointerSize;

  __ EnterInternalFrame();

  __ ldr(r0, MemOperand(fp, kFunctionOffset));  // get the function
  __ push(r0);
  __ ldr(r0, MemOperand(fp, kArgsOffset));  // get the args array
  __ push(r0);
  // APPLY_PREPARE validates the arguments and returns the smi-tagged
  // argument count in r0 — assumption based on the uses below; confirm
  // against the builtin's definition.
  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_JS);

  // Check the stack for overflow.  We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
  // Make r2 the space we have left. The stack might already be overflowed
  // here which will cause r2 to become negative.
  __ sub(r2, sp, r2);
  // Check if the arguments will overflow the stack.
  __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ b(gt, &okay);  // Signed comparison.

  // Out of stack space.
  __ ldr(r1, MemOperand(fp, kFunctionOffset));
  __ push(r1);
  __ push(r0);
  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_JS);
  // End of stack check.

  // Push current limit and index.
  __ bind(&okay);
  __ push(r0);  // limit
  __ mov(r1, Operand(0));  // initial index
  __ push(r1);

  // Change context eagerly to get the right global object if necessary.
  __ ldr(r0, MemOperand(fp, kFunctionOffset));
  __ ldr(cp, FieldMemOperand(r0, JSFunction::kContextOffset));

  // Compute the receiver (same policy as Generate_FunctionCall step 3a).
  Label call_to_object, use_global_receiver, push_receiver;
  __ ldr(r0, MemOperand(fp, kRecvOffset));
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &call_to_object);  // Smi receiver must be boxed.
  __ LoadRoot(r1, Heap::kNullValueRootIndex);
  __ cmp(r0, r1);
  __ b(eq, &use_global_receiver);
  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, r1);
  __ b(eq, &use_global_receiver);

  // Check if the receiver is already a JavaScript object.
  // r0: receiver
  __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &call_to_object);
  __ cmp(r1, Operand(LAST_JS_OBJECT_TYPE));
  __ b(le, &push_receiver);

  // Convert the receiver to a regular object.
  // r0: receiver
  __ bind(&call_to_object);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
  __ b(&push_receiver);

  // Use the current global receiver object as the receiver.
  __ bind(&use_global_receiver);
  const int kGlobalOffset =
      Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  __ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
  __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
  __ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
  __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));

  // Push the receiver.
  // r0: receiver
  __ bind(&push_receiver);
  __ push(r0);

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ ldr(r0, MemOperand(fp, kIndexOffset));
  __ b(&entry);  // Enter at the test so an empty array copies nothing.

  // Load the current argument from the arguments array and push it to the
  // stack.
  // r0: current argument index (smi)
  __ bind(&loop);
  __ ldr(r1, MemOperand(fp, kArgsOffset));
  __ push(r1);
  __ push(r0);

  // Call the runtime to access the property in the arguments array.
  __ CallRuntime(Runtime::kGetProperty, 2);
  __ push(r0);

  // Advance the smi-tagged index stored in the frame.
  __ ldr(r0, MemOperand(fp, kIndexOffset));
  __ add(r0, r0, Operand(1 << kSmiTagSize));
  __ str(r0, MemOperand(fp, kIndexOffset));

  // Test if the copy loop has finished copying all the elements from the
  // arguments object.
  __ bind(&entry);
  __ ldr(r1, MemOperand(fp, kLimitOffset));
  __ cmp(r0, r1);
  __ b(ne, &loop);

  // Invoke the function.
  ParameterCount actual(r0);
  __ mov(r0, Operand(r0, ASR, kSmiTagSize));  // Un-smi the argument count.
  __ ldr(r1, MemOperand(fp, kFunctionOffset));
  __ InvokeFunction(r1, actual, CALL_FUNCTION);

  // Tear down the internal frame and remove function, receiver and args.
  __ LeaveInternalFrame();
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Jump(lr);
}
1185
1186
// Builds an arguments-adaptor frame: pushes lr, fp, the ARGUMENTS_ADAPTOR
// frame-type marker, the function (r1) and the smi-tagged actual argument
// count (r0), then points fp at the marker slot.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));  // Smi-tag the argument count.
  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(3 * kPointerSize));
}
1193
1194
// Tears down an arguments-adaptor frame built by EnterArgumentsAdaptorFrame
// and drops the pushed arguments plus the receiver from the caller's stack.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ldr(r1, MemOperand(fp, -3 * kPointerSize));  // Smi-tagged argc slot.
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());  // Restore caller fp and lr.
  // Shift by (log2(pointer) - smi tag) converts the smi count to bytes.
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
}
1207
1208
// Adapts the actual argument count to the callee's expected count before
// jumping to its code: surplus arguments are copied through (and later
// ignored via the adaptor frame), missing ones are filled with undefined.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  //  -- r3 : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ cmp(r0, r2);
  __ b(lt, &too_few);
  // The sentinel marks functions that accept any argument count unchanged.
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address into r2.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
    // adjust for return address and receiver
    __ add(r0, r0, Operand(2 * kPointerSize));
    __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));

    // Copy the arguments (including the receiver) to the new stack frame,
    // walking downwards from the highest slot.
    // r0: copy start address
    // r1: function
    // r2: copy end address
    // r3: code entry to call

    Label copy;
    __ bind(&copy);
    __ ldr(ip, MemOperand(r0, 0));
    __ push(ip);
    __ cmp(r0, r2);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    __ b(&invoke);
  }

  {  // Too few parameters: Actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address is fp.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
    __ push(ip);
    __ cmp(r0, fp);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    // Fill the remaining expected arguments with undefined.
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
    __ sub(r2, r2, Operand(4 * kPointerSize));  // Adjust for frame.

    Label fill;
    __ bind(&fill);
    __ push(ip);
    __ cmp(sp, r2);
    __ b(ne, &fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ Call(r3);

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);


  // -------------------------------------------
  // Dont adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(r3);
}
1311
1312
1313#undef __
1314
1315} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01001316
1317#endif // V8_TARGET_ARCH_ARM