// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
  // TODO(428): Don't pass the function in a static variable.
  __ mov(ip, Operand(ExternalReference::builtin_passed_function()));
  __ str(r1, MemOperand(ip, 0));

  // The actual argument count has already been loaded into register
  // r0, but JumpToRuntime expects r0 to contain the number of
  // arguments including the receiver.
  __ add(r0, r0, Operand(1));
  __ JumpToRuntime(ExternalReference(id));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the global context.

  __ ldr(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kGlobalContextOffset));
  // Load the Array function from the global context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


// This constant has the same value as JSArray::kPreallocatedArrayElements; if
// JSArray::kPreallocatedArrayElements is changed, the handling of loop
// unfolding below should be reconsidered.
static const int kLoopUnfoldLimit = 4;


// Allocate an empty JSArray. The allocated array is put into the result
// register. An elements backing store is allocated with size initial_capacity
// and filled with the hole values.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 int initial_capacity,
                                 Label* gc_required) {
  ASSERT(initial_capacity > 0);
  // Load the initial map from the array function.
  __ ldr(scratch1, FieldMemOperand(array_function,
                                   JSFunction::kPrototypeOrInitialMapOffset));

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.
  int size = JSArray::kSize + FixedArray::SizeFor(initial_capacity);
  __ AllocateInNewSpace(size / kPointerSize,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);
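  // The single allocation above holds the JSArray header immediately followed
  // by its FixedArray backing store:
  //   [JSArray: map | properties | elements | length]
  //   [FixedArray: map | length | initial_capacity hole values]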

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ str(scratch1, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
  __ str(scratch1, FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ mov(scratch3, Operand(0));
  __ str(scratch3, FieldMemOperand(result, JSArray::kLengthOffset));

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // scratch2: start of next object
  __ lea(scratch1, MemOperand(result, JSArray::kSize));
  __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  __ and_(scratch1, scratch1, Operand(~kHeapObjectTagMask));

  // Initialize the FixedArray and fill it with holes. FixedArray length is not
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array (untagged)
  // scratch2: start of next object
  __ LoadRoot(scratch3, Heap::kFixedArrayMapRootIndex);
  ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
  __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
  __ mov(scratch3, Operand(initial_capacity));
  ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
  __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));

  // Fill the FixedArray with the hole value.
  ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
  ASSERT(initial_capacity <= kLoopUnfoldLimit);
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  for (int i = 0; i < initial_capacity; i++) {
    __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
  }
}

// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register, and the beginning and end of the FixedArray
// elements storage are put into registers elements_array_storage and
// elements_array_end (see below for when that is not the case). If the
// parameter fill_with_hole is true the allocated elements backing store is
// filled with the hole values, otherwise it is left uninitialized. When the
// backing store is filled the register elements_array_storage is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi.
                            Register result,
                            Register elements_array_storage,
                            Register elements_array_end,
                            Register scratch1,
                            Register scratch2,
                            bool fill_with_hole,
                            Label* gc_required) {
  Label not_empty, allocated;

  // Load the initial map from the array function.
  __ ldr(elements_array_storage,
         FieldMemOperand(array_function,
                         JSFunction::kPrototypeOrInitialMapOffset));

  // Check whether an empty sized array is requested.
  __ tst(array_size, array_size);
  __ b(nz, &not_empty);

  // If an empty array is requested, allocate a small elements array anyway.
  // This keeps the code below free of special casing for the empty array.
  int size = JSArray::kSize +
             FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
  __ AllocateInNewSpace(size / kPointerSize,
                        result,
                        elements_array_end,
                        scratch1,
                        gc_required,
                        TAG_OBJECT);
  __ jmp(&allocated);

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested number of elements.
  __ bind(&not_empty);
  ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
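  // A smi stores the integer value n as (n << kSmiTagSize) with a zero tag
  // bit, so the arithmetic shift right below recovers the element count and
  // elements_array_end ends up holding the allocation size in words.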
  __ mov(elements_array_end,
         Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
  __ add(elements_array_end,
         elements_array_end,
         Operand(array_size, ASR, kSmiTagSize));
  __ AllocateInNewSpace(elements_array_end,
                        result,
                        scratch1,
                        scratch2,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array_storage: initial map
  // array_size: size of array (smi)
  __ bind(&allocated);
  __ str(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
  __ str(elements_array_storage,
         FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ str(array_size, FieldMemOperand(result, JSArray::kLengthOffset));

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // array_size: size of array (smi)
  __ add(elements_array_storage, result, Operand(JSArray::kSize));
  __ str(elements_array_storage,
         FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  __ and_(elements_array_storage,
          elements_array_storage,
          Operand(~kHeapObjectTagMask));
  // Initialize the fixed array and fill it with holes. FixedArray length is not
  // stored as a smi.
  // result: JSObject
  // elements_array_storage: elements array (untagged)
  // array_size: size of array (smi)
  ASSERT(kSmiTag == 0);
  __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
  ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
  __ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex));
  // Convert array_size from smi to value.
  __ mov(array_size,
         Operand(array_size, ASR, kSmiTagSize));
  __ tst(array_size, array_size);
  // The length of the FixedArray is the number of pre-allocated elements if
  // the actual JSArray has length 0, and the requested size for non-empty
  // JSArrays. The length of a FixedArray is not stored as a smi.
  __ mov(array_size, Operand(JSArray::kPreallocatedArrayElements), LeaveCC, eq);
  ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
  __ str(array_size,
         MemOperand(elements_array_storage, kPointerSize, PostIndex));

  // Calculate elements array and elements array end.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // array_size: size of elements array
  __ add(elements_array_end,
         elements_array_storage,
         Operand(array_size, LSL, kPointerSizeLog2));

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // elements_array_end: start of next object
  if (fill_with_hole) {
    Label loop, entry;
    __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
    __ jmp(&entry);
    __ bind(&loop);
    __ str(scratch1,
           MemOperand(elements_array_storage, kPointerSize, PostIndex));
    __ bind(&entry);
    __ cmp(elements_array_storage, elements_array_end);
    __ b(lt, &loop);
  }
}

// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code the runtime is called. This
// function assumes the following state:
//   r0: argc
//   r1: constructor (built-in Array function)
//   lr: return address
//   sp[0]: last argument
// This function is used for both construct and normal calls of Array. The only
// difference between handling a construct call and a normal call is that for a
// construct call the constructor function in r1 needs to be preserved for
// entering the generic code. In both cases argc in r0 needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// construct call and normal call.
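// In JavaScript terms the cases handled below correspond to:
//   new Array()           -> empty array with a preallocated backing store
//   new Array(len)        -> array of length len filled with holes (len must
//                            be a suitable smi, otherwise generic code is used)
//   new Array(a, b, ...)  -> array containing the given arguments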
static void ArrayNativeCode(MacroAssembler* masm,
                            Label* call_generic_code) {
  Label argc_one_or_more, argc_two_or_more;

  // Check for array construction with zero arguments or one.
  __ cmp(r0, Operand(0));
  __ b(ne, &argc_one_or_more);

  // Handle construction of an empty array.
  AllocateEmptyJSArray(masm,
                       r1,
                       r2,
                       r3,
                       r4,
                       r5,
                       JSArray::kPreallocatedArrayElements,
                       call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1, r3, r4);
  // Setup return value, remove receiver from stack and return.
  __ mov(r0, r2);
  __ add(sp, sp, Operand(kPointerSize));
  __ Jump(lr);

  // Check for one argument. Bail out if argument is not smi or if it is
  // negative.
  __ bind(&argc_one_or_more);
  __ cmp(r0, Operand(1));
  __ b(ne, &argc_two_or_more);
  ASSERT(kSmiTag == 0);
  __ ldr(r2, MemOperand(sp));  // Get the argument from the stack.
  __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
  __ b(ne, call_generic_code);
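  // The single and_ above sets the flags: the result is non-zero (ne) exactly
  // when the argument is not a smi (low tag bit set) or is negative (sign bit
  // set).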

  // Handle construction of an empty array of a certain size. Bail out if size
  // is too large to actually allocate an elements array.
  ASSERT(kSmiTag == 0);
  __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
  __ b(ge, call_generic_code);

  // r0: argc
  // r1: constructor
  // r2: array_size (smi)
  // sp[0]: argument
  AllocateJSArray(masm,
                  r1,
                  r2,
                  r3,
                  r4,
                  r5,
                  r6,
                  r7,
                  true,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1, r2, r4);
  // Setup return value, remove receiver and argument from stack and return.
  __ mov(r0, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Jump(lr);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ mov(r2, Operand(r0, LSL, kSmiTagSize));  // Convert argc to a smi.

  // r0: argc
  // r1: constructor
  // r2: array_size (smi)
  // sp[0]: last argument
  AllocateJSArray(masm,
                  r1,
                  r2,
                  r3,
                  r4,
                  r5,
                  r6,
                  r7,
                  false,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1, r2, r6);

  // Fill arguments as array elements. Copy from the top of the stack (last
  // element) to the array backing store filling it backwards. Note:
  // elements_array_end points after the backing store therefore PreIndex is
  // used when filling the backing store.
  // r0: argc
  // r3: JSArray
  // r4: elements_array storage start (untagged)
  // r5: elements_array_end (untagged)
  // sp[0]: last argument
  Label loop, entry;
  __ jmp(&entry);
  __ bind(&loop);
  __ ldr(r2, MemOperand(sp, kPointerSize, PostIndex));
  __ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
  __ bind(&entry);
  __ cmp(r4, r5);
  __ b(lt, &loop);

  // Remove caller arguments and receiver from the stack, setup return value and
  // return.
  // r0: argc
  // r3: JSArray
  // sp[0]: receiver
  __ add(sp, sp, Operand(kPointerSize));
  __ mov(r0, r3);
  __ Jump(lr);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code if the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);
  Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
  Handle<Code> array_code(code);
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin Array function which
    // always has a map.
    GenerateLoadArrayFunction(masm, r2);
    __ cmp(r1, r2);
    __ Assert(eq, "Unexpected Array function");
    // Initial map for the builtin Array function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
  Handle<Code> generic_construct_stub(code);
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Label non_function_call;
  // Check that the function is not a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &non_function_call);
  // Check that the function is a JSFunction.
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &non_function_call);

  // Jump to the function-specific construct stub.
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kConstructStubOffset));
  __ add(pc, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
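  // r2 holds the (heap-tagged) construct stub Code object; adding
  // Code::kHeaderSize - kHeapObjectTag yields the address of its first
  // instruction, and writing that into pc performs the jump.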

  // r0: number of arguments
  // r1: called object
  __ bind(&non_function_call);

  // Set expected number of arguments to zero (not changing r0).
  __ mov(r2, Operand(0));
  __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
          RelocInfo::CODE_TARGET);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // Enter a construct frame.
  __ EnterConstructFrame();

  // Preserve the two incoming parameters on the stack.
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
  __ push(r0);  // Smi-tagged arguments count.
  __ push(r1);  // Constructor function.

  // Use r7 for holding undefined which is used in several places below.
  __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);

  // Try to allocate the object without transitioning into C code. If any of the
  // preconditions is not met, the code bails out to the runtime call.
  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address();
    __ mov(r2, Operand(debug_step_in_fp));
    __ ldr(r2, MemOperand(r2));
    __ tst(r2, r2);
    __ b(nz, &rt_call);
#endif

    // Load the initial map and verify that it is in fact a map.
    // r1: constructor function
    // r7: undefined
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ b(eq, &rt_call);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ b(ne, &rt_call);

    // Check that the constructor is not constructing a JSFunction (see comments
    // in Runtime_NewObject in runtime.cc); in that case the initial map's
    // instance type would be JS_FUNCTION_TYPE.
    // r1: constructor function
    // r2: initial map
    // r7: undefined
    __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
    __ b(eq, &rt_call);

    // Now allocate the JSObject on the heap.
    // r1: constructor function
    // r2: initial map
    // r7: undefined
    __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
    __ AllocateInNewSpace(r3, r4, r5, r6, &rt_call, NO_ALLOCATION_FLAGS);

    // Allocated the JSObject, now initialize the fields. Map is set to initial
    // map and properties and elements are set to empty fixed array.
    // r1: constructor function
    // r2: initial map
    // r3: object size
    // r4: JSObject (not tagged)
    // r7: undefined
    __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
    __ mov(r5, r4);
    ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
    __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
    ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
    __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
    ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
    __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

    // Fill all the in-object properties with undefined.
    // r1: constructor function
    // r2: initial map
    // r3: object size (in words)
    // r4: JSObject (not tagged)
    // r5: First in-object property of JSObject (not tagged)
    // r7: undefined
    __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
    ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
    { Label loop, entry;
      __ b(&entry);
      __ bind(&loop);
      __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
      __ bind(&entry);
      __ cmp(r5, Operand(r6));
      __ b(lt, &loop);
    }

    // Add the object tag to make the JSObject real, so that we can continue and
    // jump into the continuation code at any time from now on. Any failures
    // need to undo the allocation, so that the heap is in a consistent state
    // and verifiable.
    __ add(r4, r4, Operand(kHeapObjectTag));

    // Check if a non-empty properties array is needed. Continue with the
    // allocated object if not; otherwise fall through and allocate the
    // properties array below.
    // r1: constructor function
    // r4: JSObject
    // r5: start of next object (not tagged)
    // r7: undefined
    __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
    // The instance sizes field contains both the number of pre-allocated
    // property fields and the number of in-object properties.
    __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
    __ and_(r6,
            r0,
            Operand(0x000000FF << Map::kPreAllocatedPropertyFieldsByte * 8));
    __ add(r3, r3, Operand(r6, LSR, Map::kPreAllocatedPropertyFieldsByte * 8));
    __ and_(r6, r0, Operand(0x000000FF << Map::kInObjectPropertiesByte * 8));
    __ sub(r3, r3, Operand(r6, LSR, Map::kInObjectPropertiesByte * 8), SetCC);

    // Done if no extra properties are to be allocated.
    __ b(eq, &allocated);
    __ Assert(pl, "Property allocation count failed.");

    // Scale the number of elements by pointer size and add the header for
    // FixedArrays to the start of the next object calculation from above.
    // r1: constructor
    // r3: number of elements in properties array
    // r4: JSObject
    // r5: start of next object
    // r7: undefined
    __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
    __ AllocateInNewSpace(r0,
                          r5,
                          r6,
                          r2,
                          &undo_allocation,
                          RESULT_CONTAINS_TOP);

    // Initialize the FixedArray.
    // r1: constructor
    // r3: number of elements in properties array
    // r4: JSObject
    // r5: FixedArray (not tagged)
    // r7: undefined
    __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
    __ mov(r2, r5);
    ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
    __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
    ASSERT_EQ(1 * kPointerSize, Array::kLengthOffset);
    __ str(r3, MemOperand(r2, kPointerSize, PostIndex));

    // Initialize the fields to undefined.
    // r1: constructor function
    // r2: First element of FixedArray (not tagged)
    // r3: number of elements in properties array
    // r4: JSObject
    // r5: FixedArray (not tagged)
    // r7: undefined
    __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
    ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
    { Label loop, entry;
      __ b(&entry);
      __ bind(&loop);
      __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
      __ bind(&entry);
      __ cmp(r2, Operand(r6));
      __ b(lt, &loop);
    }

    // Store the initialized FixedArray into the properties field of
    // the JSObject.
    // r1: constructor function
    // r4: JSObject
    // r5: FixedArray (not tagged)
    __ add(r5, r5, Operand(kHeapObjectTag));  // Add the heap tag.
    __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));

    // Continue with JSObject being successfully allocated.
    // r1: constructor function
    // r4: JSObject
    __ jmp(&allocated);

    // Undo the setting of the new top so that the heap is verifiable. For
    // example, the map's unused properties potentially do not match the
    // allocated object's unused properties.
    // r4: JSObject (previous new top)
    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(r4, r5);
  }

  // Allocate the new receiver object using the runtime call.
  // r1: constructor function
  __ bind(&rt_call);
  __ push(r1);  // argument for Runtime_NewObject
  __ CallRuntime(Runtime::kNewObject, 1);
  __ mov(r4, r0);

  // Receiver for constructor call allocated.
  // r4: JSObject
  __ bind(&allocated);
  __ push(r4);

  // Push the function and the allocated receiver from the stack.
  // sp[0]: receiver (newly allocated object)
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ ldr(r1, MemOperand(sp, kPointerSize));
  __ push(r1);  // Constructor function.
  __ push(r4);  // Receiver.

  // Reload the number of arguments from the stack.
  // r1: constructor function
  // sp[0]: receiver
  // sp[1]: constructor function
  // sp[2]: receiver
  // sp[3]: constructor function
  // sp[4]: number of arguments (smi-tagged)
  __ ldr(r3, MemOperand(sp, 4 * kPointerSize));

  // Setup pointer to last argument.
  __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

  // Setup number of arguments for function call below.
  __ mov(r0, Operand(r3, LSR, kSmiTagSize));

  // Copy arguments and receiver to the expression stack.
  // r0: number of arguments
  // r2: address of last argument (caller sp)
  // r1: constructor function
  // r3: number of arguments (smi-tagged)
  // sp[0]: receiver
  // sp[1]: constructor function
  // sp[2]: receiver
  // sp[3]: constructor function
  // sp[4]: number of arguments (smi-tagged)
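  // r3 holds the argument count as a smi (i.e. count << 1), so subtracting 2
  // in the loop below steps it down by one argument, and scaling by
  // kPointerSizeLog2 - 1 converts the smi index directly into a byte offset.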
  Label loop, entry;
  __ b(&entry);
  __ bind(&loop);
  __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
  __ push(ip);
  __ bind(&entry);
  __ sub(r3, r3, Operand(2), SetCC);
  __ b(ge, &loop);

  // Call the function.
  // r0: number of arguments
  // r1: constructor function
  ParameterCount actual(r0);
  __ InvokeFunction(r1, actual, CALL_FUNCTION);

  // Pop the function from the stack.
  // sp[0]: constructor function
  // sp[2]: receiver
  // sp[3]: constructor function
  // sp[4]: number of arguments (smi-tagged)
  __ pop();

  // Restore context from the frame.
  // r0: result
  // sp[0]: receiver
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

  // If the result is an object (in the ECMA sense), we should get rid
  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
  // on page 74.
  Label use_receiver, exit;

  // If the result is a smi, it is *not* an object in the ECMA sense.
  // r0: result
  // sp[0]: receiver (newly allocated object)
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &use_receiver);

  // If the type of the result (stored in its map) is less than
  // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
  __ CompareObjectType(r0, r3, r3, FIRST_JS_OBJECT_TYPE);
  __ b(ge, &exit);

  // Throw away the result of the constructor invocation and use the
  // on-stack receiver as the result.
  __ bind(&use_receiver);
  __ ldr(r0, MemOperand(sp));

  // Remove receiver from the stack, remove caller arguments, and
  // return.
  __ bind(&exit);
  // r0: result
  // sp[0]: receiver (newly allocated object)
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
  __ LeaveConstructFrame();
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2);
  __ Jump(lr);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r7, cp may be clobbered

  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, Operand(0));

  // Enter an internal frame.
  __ EnterInternalFrame();

  // Set up the context from the function argument.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Set up the roots register.
  ExternalReference roots_address = ExternalReference::roots_address();
  __ mov(r10, Operand(roots_address));

  // Push the function and the receiver onto the stack.
  __ push(r1);
  __ push(r2);

  // Copy arguments to the stack in a loop.
  // r1: function
  // r3: argc
  // r4: argv, i.e. points to first arg
  Label loop, entry;
  __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
  // r2 points past last arg.
  __ b(&entry);
  __ bind(&loop);
  __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
  __ ldr(r0, MemOperand(r0));  // dereference handle
  __ push(r0);  // push parameter
  __ bind(&entry);
  __ cmp(r4, Operand(r2));
  __ b(ne, &loop);

  // Initialize all JavaScript callee-saved registers, since they will be seen
  // by the garbage collector as part of handlers.
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  __ mov(r5, Operand(r4));
  __ mov(r6, Operand(r4));
  __ mov(r7, Operand(r4));
  if (kR9Available == 1) {
    __ mov(r9, Operand(r4));
  }

  // Invoke the code and pass argc as r0.
  __ mov(r0, Operand(r3));
  if (is_construct) {
    __ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
            RelocInfo::CODE_TARGET);
  } else {
    ParameterCount actual(r0);
    __ InvokeFunction(r1, actual, CALL_FUNCTION);
  }

  // Exit the JS frame and remove the parameters (except function), and return.
  // Respect ABI stack constraint.
  __ LeaveInternalFrame();
  __ Jump(lr);

  // r0: result
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  { Label done;
    __ tst(r0, Operand(r0));
    __ b(ne, &done);
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ push(r2);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call from the stack.
  // r0: actual number of arguments
  { Label done, non_function, function;
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &non_function);
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(eq, &function);

    // Non-function called: Clear the function to force exception.
    __ bind(&non_function);
    __ mov(r1, Operand(0));
    __ b(&done);

    // Change the context eagerly because it will be used below to get the
    // right global object.
    __ bind(&function);
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    __ bind(&done);
  }

  // 3. Make sure first argument is an object; convert if necessary.
  // r0: actual number of arguments
  // r1: function
  { Label call_to_object, use_global_receiver, patch_receiver, done;
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ ldr(r2, MemOperand(r2, -kPointerSize));

    // r0: actual number of arguments
    // r1: function
    // r2: first argument
    __ tst(r2, Operand(kSmiTagMask));
    __ b(eq, &call_to_object);

    __ LoadRoot(r3, Heap::kNullValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);

    __ CompareObjectType(r2, r3, r3, FIRST_JS_OBJECT_TYPE);
    __ b(lt, &call_to_object);
    __ cmp(r3, Operand(LAST_JS_OBJECT_TYPE));
    __ b(le, &done);

    __ bind(&call_to_object);
    __ EnterInternalFrame();

    // Store number of arguments and function across the call into the runtime.
    __ mov(r0, Operand(r0, LSL, kSmiTagSize));
    __ push(r0);
    __ push(r1);

    __ push(r2);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
    __ mov(r2, r0);

    // Restore number of arguments and function.
    __ pop(r1);
    __ pop(r0);
    __ mov(r0, Operand(r0, ASR, kSmiTagSize));

    __ LeaveInternalFrame();
    __ b(&patch_receiver);

    // Use the global receiver object from the called function as the receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ ldr(r2, FieldMemOperand(cp, kGlobalIndex));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(r3, -kPointerSize));

    __ bind(&done);
  }

  // 4. Shift arguments and receiver one slot down the stack.
  // r0: actual number of arguments (including call() receiver)
  // r1: function
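  // The copy below moves the receiver and all r0 arguments one slot down the
  // stack (towards higher addresses), so the first argument ends up in the
  // receiver slot of the upcoming call; the stale copy left at the top of the
  // stack is removed in step 5 together with the argument count adjustment.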
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ add(r2, r2, Operand(kPointerSize));  // copy receiver too

    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, -kPointerSize));
    __ str(ip, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
    __ cmp(r2, sp);
    __ b(ne, &loop);
  }

  // 5. Adjust the actual number of arguments and remove the top element.
  // r0: actual number of arguments (including call() receiver)
  // r1: function
  __ sub(r0, r0, Operand(1));
  __ add(sp, sp, Operand(kPointerSize));

  // 6. Get the code for the function or the non-function builtin.
  //    If number of expected arguments matches, then call. Otherwise restart
  //    the arguments adaptor stub.
  // r0: actual number of arguments
  // r1: function
  { Label invoke;
    __ tst(r1, r1);
    __ b(ne, &invoke);
    __ mov(r2, Operand(0));  // expected arguments is 0 for CALL_NON_FUNCTION
    __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
            RelocInfo::CODE_TARGET);

    __ bind(&invoke);
    __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r2,
           FieldMemOperand(r3,
                           SharedFunctionInfo::kFormalParameterCountOffset));
    __ ldr(r3,
           MemOperand(r3, SharedFunctionInfo::kCodeOffset - kHeapObjectTag));
    __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ cmp(r2, r0);  // Check formal and actual parameter counts.
    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
            RelocInfo::CODE_TARGET, ne);

    // 7. Jump to the code in r3 without checking arguments.
    ParameterCount expected(0);
    __ InvokeCode(r3, expected, expected, JUMP_FUNCTION);
  }
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset    = -5 * kPointerSize;
  const int kLimitOffset    = -4 * kPointerSize;
  const int kArgsOffset     =  2 * kPointerSize;
  const int kRecvOffset     =  3 * kPointerSize;
  const int kFunctionOffset =  4 * kPointerSize;
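  // All offsets are relative to fp inside the internal frame entered below:
  // the positive offsets reach the function, receiver and arguments object
  // passed to Function.prototype.apply by its caller, while kLimitOffset and
  // kIndexOffset name the two slots pushed at the "okay" label to drive the
  // copy loop over the arguments array.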

  __ EnterInternalFrame();

  __ ldr(r0, MemOperand(fp, kFunctionOffset));  // get the function
  __ push(r0);
  __ ldr(r0, MemOperand(fp, kArgsOffset));  // get the args array
  __ push(r0);
  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_JS);

  Label no_preemption, retry_preemption;
  __ bind(&retry_preemption);
  ExternalReference stack_guard_limit_address =
      ExternalReference::address_of_stack_guard_limit();
  __ mov(r2, Operand(stack_guard_limit_address));
  __ ldr(r2, MemOperand(r2));
  __ cmp(sp, r2);
  __ b(hi, &no_preemption);

  // We have already encountered a preemption or stack overflow before pushing
  // the array contents. Save r0 which is the Smi-tagged length of the array.
  __ push(r0);

  // Runtime routines expect at least one argument, so give it a Smi.
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ push(r0);
  __ CallRuntime(Runtime::kStackGuard, 1);

  // Since we returned, it wasn't a stack overflow. Restore r0 and try again.
  __ pop(r0);
  __ b(&retry_preemption);

  __ bind(&no_preemption);

  // Eagerly check for stack-overflow before starting to push the arguments.
  // r0: number of arguments.
  // r2: stack limit.
  Label okay;
  __ sub(r2, sp, r2);

  __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ b(hi, &okay);

  // Out of stack space.
  __ ldr(r1, MemOperand(fp, kFunctionOffset));
  __ push(r1);
  __ push(r0);
  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_JS);

  // Push current limit and index.
  __ bind(&okay);
  __ push(r0);  // limit
  __ mov(r1, Operand(0));  // initial index
  __ push(r1);

  // Change context eagerly to get the right global object if necessary.
  __ ldr(r0, MemOperand(fp, kFunctionOffset));
  __ ldr(cp, FieldMemOperand(r0, JSFunction::kContextOffset));

  // Compute the receiver.
  Label call_to_object, use_global_receiver, push_receiver;
  __ ldr(r0, MemOperand(fp, kRecvOffset));
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &call_to_object);
  __ LoadRoot(r1, Heap::kNullValueRootIndex);
  __ cmp(r0, r1);
  __ b(eq, &use_global_receiver);
  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, r1);
  __ b(eq, &use_global_receiver);

  // Check if the receiver is already a JavaScript object.
  // r0: receiver
  __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &call_to_object);
  __ cmp(r1, Operand(LAST_JS_OBJECT_TYPE));
  __ b(le, &push_receiver);

  // Convert the receiver to a regular object.
  // r0: receiver
  __ bind(&call_to_object);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
  __ b(&push_receiver);

  // Use the current global receiver object as the receiver.
  __ bind(&use_global_receiver);
  const int kGlobalOffset =
      Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  __ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
  __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));

  // Push the receiver.
  // r0: receiver
  __ bind(&push_receiver);
  __ push(r0);

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ ldr(r0, MemOperand(fp, kIndexOffset));
  __ b(&entry);

  // Load the current argument from the arguments array and push it to the
  // stack.
  // r0: current argument index
  __ bind(&loop);
  __ ldr(r1, MemOperand(fp, kArgsOffset));
  __ push(r1);
  __ push(r0);

  // Call the runtime to access the property in the arguments array.
  __ CallRuntime(Runtime::kGetProperty, 2);
  __ push(r0);

  // Advance the current argument index (a smi).
  __ ldr(r0, MemOperand(fp, kIndexOffset));
  __ add(r0, r0, Operand(1 << kSmiTagSize));
  __ str(r0, MemOperand(fp, kIndexOffset));

  // Test if the copy loop has finished copying all the elements from the
  // arguments object.
  __ bind(&entry);
  __ ldr(r1, MemOperand(fp, kLimitOffset));
  __ cmp(r0, r1);
  __ b(ne, &loop);

  // Invoke the function.
  ParameterCount actual(r0);
  __ mov(r0, Operand(r0, ASR, kSmiTagSize));
  __ ldr(r1, MemOperand(fp, kFunctionOffset));
  __ InvokeFunction(r1, actual, CALL_FUNCTION);

  // Tear down the internal frame and remove function, receiver and args.
  __ LeaveInternalFrame();
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Jump(lr);
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(3 * kPointerSize));
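  // The stm above stores, from lowest to highest address: the smi-tagged
  // argument count (r0), the function (r1), the ARGUMENTS_ADAPTOR frame marker
  // (r4), the caller's fp and lr. fp is then set to point at the saved fp
  // slot, which is why LeaveArgumentsAdaptorFrame finds the count at
  // fp - 3 * kPointerSize.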
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ldr(r1, MemOperand(fp, -3 * kPointerSize));
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  //  -- r3 : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ cmp(r0, Operand(r2));
  __ b(lt, &too_few);
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address into r2.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
    // adjust for return address and receiver
    __ add(r0, r0, Operand(2 * kPointerSize));
    __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: copy end address
    // r3: code entry to call

    Label copy;
    __ bind(&copy);
    __ ldr(ip, MemOperand(r0, 0));
    __ push(ip);
    __ cmp(r0, r2);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    __ b(&invoke);
  }

  {  // Too few parameters: actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0; the copy end address is fp.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
    __ push(ip);
    __ cmp(r0, fp);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    // Fill the remaining expected arguments with undefined.
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
    __ sub(r2, r2, Operand(4 * kPointerSize));  // Adjust for frame.
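    // The extra 4 * kPointerSize covers the receiver slot plus the three
    // adaptor frame slots below fp (frame marker, function and smi-tagged
    // argument count), so exactly (expected - actual) undefined values are
    // pushed before sp reaches r2.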

    Label fill;
    __ bind(&fill);
    __ push(ip);
    __ cmp(sp, r2);
    __ b(ne, &fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ Call(r3);

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(r3);
}


#undef __

} }  // namespace v8::internal