// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument (argc == r0)
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(r1);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(r0, r0, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id));
}
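
// Informal overview: this adaptor is the glue between a JavaScript call and a
// C++ builtin identified by |id|. It optionally pushes the called function as
// an extra trailing argument, bumps r0 so that it also counts the receiver and
// the extra arguments, and then tail-calls into the runtime through
// JumpToExternalReference; the actual work happens in the C++ builtin.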


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the global context.

  __ ldr(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kGlobalContextOffset));
  // Load the Array function from the global context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


// This constant has the same value as JSArray::kPreallocatedArrayElements; if
// JSArray::kPreallocatedArrayElements is changed, the handling of loop
// unfolding below should be reconsidered.
static const int kLoopUnfoldLimit = 4;


// Allocate an empty JSArray. The allocated array is put into the result
// register. An elements backing store is allocated with size initial_capacity
// and filled with the hole values.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 int initial_capacity,
                                 Label* gc_required) {
  ASSERT(initial_capacity > 0);
  // Load the initial map from the array function.
  __ ldr(scratch1, FieldMemOperand(array_function,
                                   JSFunction::kPrototypeOrInitialMapOffset));

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.
  int size = JSArray::kSize + FixedArray::SizeFor(initial_capacity);
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ str(scratch1, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
  __ str(scratch1, FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ mov(scratch3, Operand(0));
  __ str(scratch3, FieldMemOperand(result, JSArray::kLengthOffset));

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // scratch2: start of next object
  __ lea(scratch1, MemOperand(result, JSArray::kSize));
  __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  __ and_(scratch1, scratch1, Operand(~kHeapObjectTagMask));

  // Initialize the FixedArray and fill it with holes. FixedArray length is not
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array (untagged)
  // scratch2: start of next object
  __ LoadRoot(scratch3, Heap::kFixedArrayMapRootIndex);
  ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
  __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
  __ mov(scratch3, Operand(initial_capacity));
  ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
  __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));

  // Fill the FixedArray with the hole value.
  ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
  ASSERT(initial_capacity <= kLoopUnfoldLimit);
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  for (int i = 0; i < initial_capacity; i++) {
    __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
  }
}
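
// Informal sketch of the resulting layout (assuming no GC bailout): the new
// JSArray and its backing store are allocated as one contiguous block,
//   result + 0             : JSArray (map, empty properties, length = 0,
//                            elements pointer)
//   result + JSArray::kSize: FixedArray (map, length = initial_capacity,
//                            followed by initial_capacity hole values)
// with the JSArray's elements field pointing at the FixedArray.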

// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register, and the beginning and end of the FixedArray
// elements storage are put into registers elements_array_storage and
// elements_array_end (see below for when that is not the case). If the
// parameter fill_with_hole is true, the allocated elements backing store is
// filled with the hole values; otherwise it is left uninitialized. When the
// backing store is filled, the register elements_array_storage is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi.
                            Register result,
                            Register elements_array_storage,
                            Register elements_array_end,
                            Register scratch1,
                            Register scratch2,
                            bool fill_with_hole,
                            Label* gc_required) {
  Label not_empty, allocated;

  // Load the initial map from the array function.
  __ ldr(elements_array_storage,
         FieldMemOperand(array_function,
                         JSFunction::kPrototypeOrInitialMapOffset));

  // Check whether an empty sized array is requested.
  __ tst(array_size, array_size);
  __ b(nz, &not_empty);

  // If an empty array is requested, allocate a small elements array anyway.
  // This keeps the code below free of special casing for the empty array.
  int size = JSArray::kSize +
             FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
  __ AllocateInNewSpace(size,
                        result,
                        elements_array_end,
                        scratch1,
                        gc_required,
                        TAG_OBJECT);
  __ jmp(&allocated);

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested number of elements.
  __ bind(&not_empty);
  ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ mov(elements_array_end,
         Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
  __ add(elements_array_end,
         elements_array_end,
         Operand(array_size, ASR, kSmiTagSize));
  __ AllocateInNewSpace(
      elements_array_end,
      result,
      scratch1,
      scratch2,
      gc_required,
      static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array_storage: initial map
  // array_size: size of array (smi)
  __ bind(&allocated);
  __ str(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
  __ str(elements_array_storage,
         FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ str(array_size, FieldMemOperand(result, JSArray::kLengthOffset));

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // array_size: size of array (smi)
  __ add(elements_array_storage, result, Operand(JSArray::kSize));
  __ str(elements_array_storage,
         FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  __ and_(elements_array_storage,
          elements_array_storage,
          Operand(~kHeapObjectTagMask));
  // Initialize the fixed array and fill it with holes. FixedArray length is
  // not stored as a smi.
  // result: JSObject
  // elements_array_storage: elements array (untagged)
  // array_size: size of array (smi)
  ASSERT(kSmiTag == 0);
  __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
  ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
  __ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex));
  // Convert array_size from smi to value.
  __ mov(array_size,
         Operand(array_size, ASR, kSmiTagSize));
  __ tst(array_size, array_size);
  // The length of the FixedArray is the number of pre-allocated elements if
  // the actual JSArray has length 0, and the length of the JSArray for
  // non-empty JSArrays. The length of a FixedArray is not stored as a smi.
  __ mov(array_size, Operand(JSArray::kPreallocatedArrayElements), LeaveCC, eq);
  ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
  __ str(array_size,
         MemOperand(elements_array_storage, kPointerSize, PostIndex));

  // Calculate elements array and elements array end.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // array_size: size of elements array
  __ add(elements_array_end,
         elements_array_storage,
         Operand(array_size, LSL, kPointerSizeLog2));

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // elements_array_end: start of next object
  if (fill_with_hole) {
    Label loop, entry;
    __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
    __ jmp(&entry);
    __ bind(&loop);
    __ str(scratch1,
           MemOperand(elements_array_storage, kPointerSize, PostIndex));
    __ bind(&entry);
    __ cmp(elements_array_storage, elements_array_end);
    __ b(lt, &loop);
  }
}
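
// Informal note on the size computation above: for a non-empty array the
// allocation request is expressed in words as
//   (JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize + array length
// (the smi-tagged array_size is shifted right by kSmiTagSize to get the
// element count), and AllocateInNewSpace is told so via SIZE_IN_WORDS.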

// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code, the runtime is called.
// This function assumes the following state:
//   r0: argc
//   r1: constructor (built-in Array function)
//   lr: return address
//   sp[0]: last argument
// This function is used for both construct and normal calls of Array. The only
// difference between handling a construct call and a normal call is that for a
// construct call the constructor function in r1 needs to be preserved for
// entering the generic code. In both cases argc in r0 needs to be preserved.
// Both registers are preserved by this code, so there is no need to
// differentiate between a construct call and a normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label* call_generic_code) {
  Label argc_one_or_more, argc_two_or_more;

  // Check for array construction with zero arguments or one.
  __ cmp(r0, Operand(0));
  __ b(ne, &argc_one_or_more);

  // Handle construction of an empty array.
  AllocateEmptyJSArray(masm,
                       r1,
                       r2,
                       r3,
                       r4,
                       r5,
                       JSArray::kPreallocatedArrayElements,
                       call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1, r3, r4);
  // Set up return value, remove receiver from stack and return.
  __ mov(r0, r2);
  __ add(sp, sp, Operand(kPointerSize));
  __ Jump(lr);

  // Check for one argument. Bail out if the argument is not a smi or if it is
  // negative.
  __ bind(&argc_one_or_more);
  __ cmp(r0, Operand(1));
  __ b(ne, &argc_two_or_more);
  ASSERT(kSmiTag == 0);
  __ ldr(r2, MemOperand(sp));  // Get the argument from the stack.
  __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
  __ b(ne, call_generic_code);

  // Handle construction of an empty array of a certain size. Bail out if size
  // is too large to actually allocate an elements array.
  ASSERT(kSmiTag == 0);
  __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
  __ b(ge, call_generic_code);

  // r0: argc
  // r1: constructor
  // r2: array_size (smi)
  // sp[0]: argument
  AllocateJSArray(masm,
                  r1,
                  r2,
                  r3,
                  r4,
                  r5,
                  r6,
                  r7,
                  true,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1, r2, r4);
  // Set up return value, remove receiver and argument from stack and return.
  __ mov(r0, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Jump(lr);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ mov(r2, Operand(r0, LSL, kSmiTagSize));  // Convert argc to a smi.

  // r0: argc
  // r1: constructor
  // r2: array_size (smi)
  // sp[0]: last argument
  AllocateJSArray(masm,
                  r1,
                  r2,
                  r3,
                  r4,
                  r5,
                  r6,
                  r7,
                  false,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1, r2, r6);

  // Fill arguments as array elements. Copy from the top of the stack (last
  // element) to the array backing store, filling it backwards. Note:
  // elements_array_end points after the backing store, therefore PreIndex is
  // used when filling the backing store.
  // r0: argc
  // r3: JSArray
  // r4: elements_array storage start (untagged)
  // r5: elements_array_end (untagged)
  // sp[0]: last argument
  Label loop, entry;
  __ jmp(&entry);
  __ bind(&loop);
  __ ldr(r2, MemOperand(sp, kPointerSize, PostIndex));
  __ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
  __ bind(&entry);
  __ cmp(r4, r5);
  __ b(lt, &loop);

  // Remove caller arguments and receiver from the stack, set up return value
  // and return.
  // r0: argc
  // r3: JSArray
  // sp[0]: receiver
  __ add(sp, sp, Operand(kPointerSize));
  __ mov(r0, r3);
  __ Jump(lr);
}
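
// Informal summary of the cases handled here, in JavaScript terms:
//   Array()          -> empty array backed by kPreallocatedArrayElements
//                       holes,
//   Array(len)       -> array of length len filled with holes, provided len
//                       is a non-negative smi below
//                       JSObject::kInitialMaxFastElementArray,
//   Array(a, b, ...) -> array built from the arguments on the stack.
// Anything else (non-smi or negative length, too large a length, allocation
// failure) falls through to call_generic_code and is handled there.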


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code if the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);
  Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
  Handle<Code> array_code(code);
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin Array function,
    // which always has a map.
    GenerateLoadArrayFunction(masm, r2);
    __ cmp(r1, r2);
    __ Assert(eq, "Unexpected Array function");
    // Initial map for the builtin Array function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
  Handle<Code> generic_construct_stub(code);
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Label non_function_call;
  // Check that the function is not a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &non_function_call);
  // Check that the function is a JSFunction.
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &non_function_call);

  // Jump to the function-specific construct stub.
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kConstructStubOffset));
  __ add(pc, r2, Operand(Code::kHeaderSize - kHeapObjectTag));

  // r0: number of arguments
  // r1: called object
  __ bind(&non_function_call);
  // CALL_NON_FUNCTION expects the non-function constructor as receiver
  // (instead of the original receiver from the call site). The receiver is
  // stack element argc.
  __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  // Set expected number of arguments to zero (not changing r0).
  __ mov(r2, Operand(0));
  __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
          RelocInfo::CODE_TARGET);
}
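
// Informal note: this builtin is the common entry point for 'new' calls. For
// a genuine JSFunction it tail-jumps to the function-specific construct stub
// (the generic one is generated by Generate_JSConstructStubHelper below); for
// anything else it reroutes the call through the arguments adaptor to the
// CALL_NON_FUNCTION_AS_CONSTRUCTOR builtin with zero expected arguments.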


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function) {
  // Enter a construct frame.
  __ EnterConstructFrame();

  // Preserve the two incoming parameters on the stack.
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
  __ push(r0);  // Smi-tagged arguments count.
  __ push(r1);  // Constructor function.

  // Use r7 for holding undefined which is used in several places below.
  __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);

  // Try to allocate the object without transitioning into C code. If any of
  // the preconditions is not met, the code bails out to the runtime call.
  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address();
    __ mov(r2, Operand(debug_step_in_fp));
    __ ldr(r2, MemOperand(r2));
    __ tst(r2, r2);
    __ b(nz, &rt_call);
#endif

    // Load the initial map and verify that it is in fact a map.
    // r1: constructor function
    // r7: undefined
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ b(eq, &rt_call);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ b(ne, &rt_call);

    // Check that the constructor is not constructing a JSFunction (see
    // comments in Runtime_NewObject in runtime.cc); in that case the initial
    // map's instance type would be JS_FUNCTION_TYPE.
    // r1: constructor function
    // r2: initial map
    // r7: undefined
    __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
    __ b(eq, &rt_call);

    // Now allocate the JSObject on the heap.
    // r1: constructor function
    // r2: initial map
    // r7: undefined
    __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
    __ AllocateInNewSpace(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);

    // Allocated the JSObject, now initialize the fields. Map is set to the
    // initial map and properties and elements are set to the empty fixed
    // array.
    // r1: constructor function
    // r2: initial map
    // r3: object size
    // r4: JSObject (not tagged)
    // r7: undefined
    __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
    __ mov(r5, r4);
    ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
    __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
    ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
    __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
    ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
    __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

    // Fill all the in-object properties with undefined.
    // r1: constructor function
    // r2: initial map
    // r3: object size (in words)
    // r4: JSObject (not tagged)
    // r5: First in-object property of JSObject (not tagged)
    // r7: undefined
    __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
    ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
    { Label loop, entry;
      __ b(&entry);
      __ bind(&loop);
      __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
      __ bind(&entry);
      __ cmp(r5, r6);
      __ b(lt, &loop);
    }

    // Add the object tag to make the JSObject real, so that we can continue
    // and jump into the continuation code at any time from now on. Any
    // failures need to undo the allocation, so that the heap is in a
    // consistent state and verifiable.
    __ add(r4, r4, Operand(kHeapObjectTag));

    // Check if a non-empty properties array is needed. Continue with the
    // allocated object if not; fall through to the runtime call if it is.
    // r1: constructor function
    // r4: JSObject
    // r5: start of next object (not tagged)
    // r7: undefined
    __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
    // The instance sizes field contains both pre-allocated property fields
    // and in-object properties.
    __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
    __ and_(r6,
            r0,
            Operand(0x000000FF << Map::kPreAllocatedPropertyFieldsByte * 8));
    __ add(r3, r3, Operand(r6, LSR, Map::kPreAllocatedPropertyFieldsByte * 8));
    __ and_(r6, r0, Operand(0x000000FF << Map::kInObjectPropertiesByte * 8));
    __ sub(r3, r3, Operand(r6, LSR, Map::kInObjectPropertiesByte * 8), SetCC);

    // Done if no extra properties are to be allocated.
    __ b(eq, &allocated);
    __ Assert(pl, "Property allocation count failed.");

    // Scale the number of elements by pointer size and add the header for
    // FixedArrays to the start of the next object calculation from above.
    // r1: constructor
    // r3: number of elements in properties array
    // r4: JSObject
    // r5: start of next object
    // r7: undefined
    __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
    __ AllocateInNewSpace(
        r0,
        r5,
        r6,
        r2,
        &undo_allocation,
        static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

    // Initialize the FixedArray.
    // r1: constructor
    // r3: number of elements in properties array
    // r4: JSObject
    // r5: FixedArray (not tagged)
    // r7: undefined
    __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
    __ mov(r2, r5);
    ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
    __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
    ASSERT_EQ(1 * kPointerSize, Array::kLengthOffset);
    __ str(r3, MemOperand(r2, kPointerSize, PostIndex));

    // Initialize the fields to undefined.
    // r1: constructor function
    // r2: First element of FixedArray (not tagged)
    // r3: number of elements in properties array
    // r4: JSObject
    // r5: FixedArray (not tagged)
    // r7: undefined
    __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
    ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
    { Label loop, entry;
      __ b(&entry);
      __ bind(&loop);
      __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
      __ bind(&entry);
      __ cmp(r2, r6);
      __ b(lt, &loop);
    }

    // Store the initialized FixedArray into the properties field of
    // the JSObject.
    // r1: constructor function
    // r4: JSObject
    // r5: FixedArray (not tagged)
    __ add(r5, r5, Operand(kHeapObjectTag));  // Add the heap tag.
    __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));

    // Continue with JSObject being successfully allocated.
    // r1: constructor function
    // r4: JSObject
    __ jmp(&allocated);

    // Undo the setting of the new top so that the heap is verifiable. For
    // example, the map's unused properties potentially do not match the
    // allocated object's unused properties.
    // r4: JSObject (previous new top)
    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(r4, r5);
  }

  // Allocate the new receiver object using the runtime call.
  // r1: constructor function
  __ bind(&rt_call);
  __ push(r1);  // argument for Runtime_NewObject
  __ CallRuntime(Runtime::kNewObject, 1);
  __ mov(r4, r0);

  // Receiver for constructor call allocated.
  // r4: JSObject
  __ bind(&allocated);
  __ push(r4);

  // Push the function and the allocated receiver from the stack.
  // sp[0]: receiver (newly allocated object)
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ ldr(r1, MemOperand(sp, kPointerSize));
  __ push(r1);  // Constructor function.
  __ push(r4);  // Receiver.

  // Reload the number of arguments from the stack.
  // r1: constructor function
  // sp[0]: receiver
  // sp[1]: constructor function
  // sp[2]: receiver
  // sp[3]: constructor function
  // sp[4]: number of arguments (smi-tagged)
  __ ldr(r3, MemOperand(sp, 4 * kPointerSize));

  // Set up pointer to last argument.
  __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

  // Set up number of arguments for function call below.
  __ mov(r0, Operand(r3, LSR, kSmiTagSize));

  // Copy arguments and receiver to the expression stack.
  // r0: number of arguments
  // r2: address of last argument (caller sp)
  // r1: constructor function
  // r3: number of arguments (smi-tagged)
  // sp[0]: receiver
  // sp[1]: constructor function
  // sp[2]: receiver
  // sp[3]: constructor function
  // sp[4]: number of arguments (smi-tagged)
  Label loop, entry;
  __ b(&entry);
  __ bind(&loop);
  __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
  __ push(ip);
  __ bind(&entry);
  __ sub(r3, r3, Operand(2), SetCC);
  __ b(ge, &loop);

  // Call the function.
  // r0: number of arguments
  // r1: constructor function
  if (is_api_function) {
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
    Handle<Code> code = Handle<Code>(
        Builtins::builtin(Builtins::HandleApiCallConstruct));
    ParameterCount expected(0);
    __ InvokeCode(code, expected, expected,
                  RelocInfo::CODE_TARGET, CALL_FUNCTION);
  } else {
    ParameterCount actual(r0);
    __ InvokeFunction(r1, actual, CALL_FUNCTION);
  }

  // Pop the function from the stack.
  // sp[0]: constructor function
  // sp[2]: receiver
  // sp[3]: constructor function
  // sp[4]: number of arguments (smi-tagged)
  __ pop();

  // Restore context from the frame.
  // r0: result
  // sp[0]: receiver
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

  // If the result is an object (in the ECMA sense), we should get rid
  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
  // on page 74.
  Label use_receiver, exit;

  // If the result is a smi, it is *not* an object in the ECMA sense.
  // r0: result
  // sp[0]: receiver (newly allocated object)
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &use_receiver);

  // If the type of the result (stored in its map) is less than
  // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
  __ CompareObjectType(r0, r3, r3, FIRST_JS_OBJECT_TYPE);
  __ b(ge, &exit);

  // Throw away the result of the constructor invocation and use the
  // on-stack receiver as the result.
  __ bind(&use_receiver);
  __ ldr(r0, MemOperand(sp));

  // Remove receiver from the stack, remove caller arguments, and
  // return.
  __ bind(&exit);
  // r0: result
  // sp[0]: receiver (newly allocated object)
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
  __ LeaveConstructFrame();
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2);
  __ Jump(lr);
}
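
// Informal note on the tail of the construct stub: per ECMA-262 section
// 13.2.2, if the constructor explicitly returns an object, that object is the
// value of 'new f(...)'; for a smi or any other non-object return value the
// freshly allocated receiver on the stack is used instead. The smi and
// instance-type checks above implement exactly that distinction.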


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r7, cp may be clobbered

  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, Operand(0));

  // Enter an internal frame.
  __ EnterInternalFrame();

  // Set up the context from the function argument.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Set up the roots register.
  ExternalReference roots_address = ExternalReference::roots_address();
  __ mov(r10, Operand(roots_address));

  // Push the function and the receiver onto the stack.
  __ push(r1);
  __ push(r2);

  // Copy arguments to the stack in a loop.
  // r1: function
  // r3: argc
  // r4: argv, i.e. points to first arg
  Label loop, entry;
  __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
  // r2 points past last arg.
  __ b(&entry);
  __ bind(&loop);
  __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
  __ ldr(r0, MemOperand(r0));  // dereference handle
  __ push(r0);  // push parameter
  __ bind(&entry);
  __ cmp(r4, r2);
  __ b(ne, &loop);

  // Initialize all JavaScript callee-saved registers, since they will be seen
  // by the garbage collector as part of handlers.
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  __ mov(r5, Operand(r4));
  __ mov(r6, Operand(r4));
  __ mov(r7, Operand(r4));
  if (kR9Available == 1) {
    __ mov(r9, Operand(r4));
  }

  // Invoke the code and pass argc as r0.
  __ mov(r0, Operand(r3));
  if (is_construct) {
    __ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
            RelocInfo::CODE_TARGET);
  } else {
    ParameterCount actual(r0);
    __ InvokeFunction(r1, actual, CALL_FUNCTION);
  }

  // Exit the JS frame and remove the parameters (except function), and return.
  // Respect ABI stack constraint.
  __ LeaveInternalFrame();
  __ Jump(lr);

  // r0: result
}
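
// Informal note: this trampoline is the bridge from the C++ entry stub
// (Generate_JS_Entry) into JavaScript. The argv it receives is an array of
// handles, which is why every parameter is dereferenced once before being
// pushed, and the JS callee-saved registers are seeded with undefined so the
// garbage collector never sees stale C++ values in them.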


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  { Label done;
    __ tst(r0, Operand(r0));
    __ b(ne, &done);
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ push(r2);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // r0: actual number of arguments
  Label non_function;
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &non_function);
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &non_function);

  // 3a. Patch the first argument if necessary when calling a function.
  // r0: actual number of arguments
  // r1: function
  Label shift_arguments;
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ ldr(r2, MemOperand(r2, -kPointerSize));
    // r0: actual number of arguments
    // r1: function
    // r2: first argument
    __ tst(r2, Operand(kSmiTagMask));
    __ b(eq, &convert_to_object);

    __ LoadRoot(r3, Heap::kNullValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);

    __ CompareObjectType(r2, r3, r3, FIRST_JS_OBJECT_TYPE);
    __ b(lt, &convert_to_object);
    __ cmp(r3, Operand(LAST_JS_OBJECT_TYPE));
    __ b(le, &shift_arguments);

    __ bind(&convert_to_object);
    __ EnterInternalFrame();  // In order to preserve argument count.
    __ mov(r0, Operand(r0, LSL, kSmiTagSize));  // Smi-tagged.
    __ push(r0);

    __ push(r2);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
    __ mov(r2, r0);

    __ pop(r0);
    __ mov(r0, Operand(r0, ASR, kSmiTagSize));
    __ LeaveInternalFrame();
    // Restore the function to r1.
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ ldr(r2, FieldMemOperand(cp, kGlobalIndex));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
    __ ldr(r2, FieldMemOperand(r2, kGlobalIndex));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(r3, -kPointerSize));

    __ jmp(&shift_arguments);
  }

  // 3b. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // r0: actual number of arguments
  // r1: function
  __ bind(&non_function);
  __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ str(r1, MemOperand(r2, -kPointerSize));
  // Clear r1 to indicate a non-function being called.
  __ mov(r1, Operand(0));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // r0: actual number of arguments
  // r1: function
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));

    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, -kPointerSize));
    __ str(ip, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
    __ cmp(r2, sp);
    __ b(ne, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin.
  // r0: actual number of arguments
  // r1: function
  { Label function;
    __ tst(r1, r1);
    __ b(ne, &function);
    __ mov(r2, Operand(0));  // expected arguments is 0 for CALL_NON_FUNCTION
    __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register r3 without checking arguments.
  // r0: actual number of arguments
  // r1: function
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset));
  __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ cmp(r2, r0);  // Check formal and actual parameter counts.
  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
          RelocInfo::CODE_TARGET, ne);

  ParameterCount expected(0);
  __ InvokeCode(r3, expected, expected, JUMP_FUNCTION);
}
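
// Informal note: this is the generic call builtin. The function to call is
// passed in the receiver slot on the stack (step 2 above). Steps 1-4 make
// sure there is a receiver, converting primitive receivers with TO_OBJECT or
// substituting the function's global receiver, and shift the arguments down
// so the original first argument becomes the receiver. Steps 5a/5b then
// either route a non-function through CALL_NON_FUNCTION or jump to the
// function's code, detouring through the arguments adaptor whenever the
// formal and actual argument counts differ.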


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset    = -5 * kPointerSize;
  const int kLimitOffset    = -4 * kPointerSize;
  const int kArgsOffset     =  2 * kPointerSize;
  const int kRecvOffset     =  3 * kPointerSize;
  const int kFunctionOffset =  4 * kPointerSize;

  __ EnterInternalFrame();

  __ ldr(r0, MemOperand(fp, kFunctionOffset));  // get the function
  __ push(r0);
  __ ldr(r0, MemOperand(fp, kArgsOffset));  // get the args array
  __ push(r0);
  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_JS);

  // Check the stack for overflow. We are not trying to catch interruptions
  // (e.g. debug break and preemption) here, so the "real stack limit" is
  // checked.
  Label okay;
  __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
  // Make r2 the space we have left. The stack might already be overflowed
  // here which will cause r2 to become negative.
  __ sub(r2, sp, r2);
  // Check if the arguments will overflow the stack.
  __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ b(gt, &okay);  // Signed comparison.

  // Out of stack space.
  __ ldr(r1, MemOperand(fp, kFunctionOffset));
  __ push(r1);
  __ push(r0);
  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_JS);
  // End of stack check.

  // Push current limit and index.
  __ bind(&okay);
  __ push(r0);  // limit
  __ mov(r1, Operand(0));  // initial index
  __ push(r1);

  // Change context eagerly to get the right global object if necessary.
  __ ldr(r0, MemOperand(fp, kFunctionOffset));
  __ ldr(cp, FieldMemOperand(r0, JSFunction::kContextOffset));

  // Compute the receiver.
  Label call_to_object, use_global_receiver, push_receiver;
  __ ldr(r0, MemOperand(fp, kRecvOffset));
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &call_to_object);
  __ LoadRoot(r1, Heap::kNullValueRootIndex);
  __ cmp(r0, r1);
  __ b(eq, &use_global_receiver);
  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, r1);
  __ b(eq, &use_global_receiver);

  // Check if the receiver is already a JavaScript object.
  // r0: receiver
  __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &call_to_object);
  __ cmp(r1, Operand(LAST_JS_OBJECT_TYPE));
  __ b(le, &push_receiver);

  // Convert the receiver to a regular object.
  // r0: receiver
  __ bind(&call_to_object);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
  __ b(&push_receiver);

  // Use the current global receiver object as the receiver.
  __ bind(&use_global_receiver);
  const int kGlobalOffset =
      Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  __ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
  __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
  __ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
  __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));

  // Push the receiver.
  // r0: receiver
  __ bind(&push_receiver);
  __ push(r0);

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ ldr(r0, MemOperand(fp, kIndexOffset));
  __ b(&entry);

  // Load the current argument from the arguments array and push it to the
  // stack.
  // r0: current argument index
  __ bind(&loop);
  __ ldr(r1, MemOperand(fp, kArgsOffset));
  __ push(r1);
  __ push(r0);

  // Call the runtime to access the property in the arguments array.
  __ CallRuntime(Runtime::kGetProperty, 2);
  __ push(r0);

  // Use inline caching to access the arguments.
  __ ldr(r0, MemOperand(fp, kIndexOffset));
  __ add(r0, r0, Operand(1 << kSmiTagSize));
  __ str(r0, MemOperand(fp, kIndexOffset));

  // Test if the copy loop has finished copying all the elements from the
  // arguments object.
  __ bind(&entry);
  __ ldr(r1, MemOperand(fp, kLimitOffset));
  __ cmp(r0, r1);
  __ b(ne, &loop);

  // Invoke the function.
  ParameterCount actual(r0);
  __ mov(r0, Operand(r0, ASR, kSmiTagSize));
  __ ldr(r1, MemOperand(fp, kFunctionOffset));
  __ InvokeFunction(r1, actual, CALL_FUNCTION);

  // Tear down the internal frame and remove function, receiver and args.
  __ LeaveInternalFrame();
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Jump(lr);
}
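
// Informal note: this builtin backs Function.prototype.apply. After
// APPLY_PREPARE has produced the number of arguments to copy, each element is
// read with Runtime::kGetProperty and pushed individually, which is why apply
// can consume array-like objects rather than only real arrays. The
// real-stack-limit check above routes oversized argument lists to the
// APPLY_OVERFLOW builtin instead of overflowing the stack while copying.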


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(3 * kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ldr(r1, MemOperand(fp, -3 * kPointerSize));
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
}
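
// Informal sketch of the adaptor frame built above (offsets in words relative
// to the new fp):
//   fp[+1]: lr (return address)
//   fp[ 0]: caller's fp
//   fp[-1]: ARGUMENTS_ADAPTOR frame marker (smi)
//   fp[-2]: function (r1)
//   fp[-3]: actual argument count, smi-tagged (r0) -- the slot that
//           LeaveArgumentsAdaptorFrame reads back to drop the arguments.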


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  //  -- r3 : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ cmp(r0, r2);
  __ b(lt, &too_few);
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address into r2.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
    // adjust for return address and receiver
    __ add(r0, r0, Operand(2 * kPointerSize));
    __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: copy end address
    // r3: code entry to call

    Label copy;
    __ bind(&copy);
    __ ldr(ip, MemOperand(r0, 0));
    __ push(ip);
    __ cmp(r0, r2);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    __ b(&invoke);
  }

  {  // Too few parameters: actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0; the copy end address is fp.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
    __ push(ip);
    __ cmp(r0, fp);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    // Fill the remaining expected arguments with undefined.
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
    __ sub(r2, r2, Operand(4 * kPointerSize));  // Adjust for frame.

    Label fill;
    __ bind(&fill);
    __ push(ip);
    __ cmp(sp, r2);
    __ b(ne, &fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ Call(r3);

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(r3);
}
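
// Informal example of what the adaptor does: calling a function declared with
// three parameters as f(1) arrives here with r0 == 1 and r2 == 3. The "too
// few" path builds an adaptor frame, copies the receiver and the single
// actual argument into it, pushes undefined for the two missing parameters,
// and only then calls the code entry in r3; on return the adaptor frame and
// the original arguments are dropped again. Functions whose expected count is
// kDontAdaptArgumentsSentinel skip all of this and are entered directly.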


#undef __

} }  // namespace v8::internal