// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"
#include "codegen-inl.h"
#include "macro-assembler.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : called function (only guaranteed when
  //                           extra_args requires it)
  //  -- rsi                 : context
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument (argc == rax)
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ pop(kScratchRegister);  // Save return address.
    __ push(rdi);
    __ push(kScratchRegister);  // Restore return address.
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToRuntime expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addq(rax, Immediate(num_extra_args + 1));
  __ JumpToRuntime(ExternalReference(id), 1);
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(rbp);
  __ movq(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ push(rdi);

  // Preserve the number of arguments on the stack. Must preserve both
  // rax and rbx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(rcx, rax);
  __ push(rcx);
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movq(rsp, rbp);
  __ pop(rbp);

  // Remove caller arguments from the stack.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
}


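// Adapts the actual argument count in rax to the expected formal parameter
// count in rbx before jumping to the code entry in rdx: surplus arguments are
// copied as-is, missing arguments are filled with undefined, and callees
// marked with kDontAdaptArgumentsSentinel are entered directly.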
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdx : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  __ IncrementCounter(&Counters::arguments_adaptors, 1);

  Label enough, too_few;
  __ cmpq(rax, rbx);
  __ j(less, &too_few);
  __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ movq(rcx, Immediate(-1));  // Account for the receiver.

    Label copy;
    __ bind(&copy);
    __ incq(rcx);
    __ push(Operand(rax, 0));
    __ subq(rax, Immediate(kPointerSize));
    __ cmpq(rcx, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ movq(rcx, Immediate(-1));  // Account for the receiver.

    Label copy;
    __ bind(&copy);
    __ incq(rcx);
    __ push(Operand(rdi, 0));
    __ subq(rdi, Immediate(kPointerSize));
    __ cmpq(rcx, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incq(rcx);
    __ push(kScratchRegister);
    __ cmpq(rcx, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);
}


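// Calls a function value that is itself passed on the stack (this is the code
// behind Function.prototype.call): ensures there is at least a receiver,
// converts a non-object receiver or substitutes the global receiver, shifts
// the arguments down over the function slot, and invokes the function,
// falling back to the arguments adaptor or CALL_NON_FUNCTION as needed.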
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Stack Layout:
  //   rsp     : return address
  //   +1      : Argument n
  //   +2      : Argument n-1
  //   ...
  //   +n      : Argument 1 = receiver
  //   +n+1    : Argument 0 = function to call
  //
  // rax contains the number of arguments, n, not counting the function.
  //
  // 1. Make sure we have at least one argument.
  { Label done;
    __ testq(rax, rax);
    __ j(not_zero, &done);
    __ pop(rbx);
    __ Push(Factory::undefined_value());
    __ push(rbx);
    __ incq(rax);
    __ bind(&done);
  }

  // 2. Get the function to call from the stack.
  { Label done, non_function, function;
    // The function to call is at position n+1 on the stack.
    __ movq(rdi, Operand(rsp, rax, times_pointer_size, +1 * kPointerSize));
    __ JumpIfSmi(rdi, &non_function);
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(equal, &function);

    // Non-function called: Clear the function to force exception.
    __ bind(&non_function);
    __ xor_(rdi, rdi);
    __ jmp(&done);

    // Function called: Change context eagerly to get the right global object.
    __ bind(&function);
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    __ bind(&done);
  }

  // 3. Make sure first argument is an object; convert if necessary.
  { Label call_to_object, use_global_receiver, patch_receiver, done;
    __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));

    __ JumpIfSmi(rbx, &call_to_object);

    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
    __ j(below, &call_to_object);
    __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
    __ j(below_equal, &done);

    __ bind(&call_to_object);
    __ EnterInternalFrame();  // Preserves rax, rbx and rdi.

    // Store the arguments count on the stack (smi tagged).
    __ Integer32ToSmi(rax, rax);
    __ push(rax);

    __ push(rdi);  // Save rdi across the builtin call.
    __ push(rbx);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ movq(rbx, rax);
    __ pop(rdi);  // Restore rdi after the call.

    // Get the arguments count and untag it.
    __ pop(rax);
    __ SmiToInteger32(rax, rax);

    __ LeaveInternalFrame();
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
    __ movq(rbx, FieldOperand(rbx, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx);

    __ bind(&done);
  }

  // 4. Shift arguments (including the receiver) one slot down the stack.
  { Label loop;
    __ lea(rcx, Operand(rax, +1));  // +1 ~ copy receiver too
    __ bind(&loop);
    __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
    __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
    __ decq(rcx);
    __ j(not_zero, &loop);
  }

  // 5. Remove TOS (copy of last argument), but keep the return address.
  __ pop(rbx);
  __ pop(rcx);
  __ push(rbx);
  __ decq(rax);

  // 6. Check that the function really was a function, get the code to
  //    call from the function and check that the number of expected
  //    arguments matches what we're providing.
  { Label invoke, trampoline;
    __ testq(rdi, rdi);
    __ j(not_zero, &invoke);
    __ xor_(rbx, rbx);
    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    __ bind(&trampoline);
    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
            RelocInfo::CODE_TARGET);

    __ bind(&invoke);
    __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movsxlq(rbx,
        FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
    __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
    __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
    __ cmpq(rax, rbx);
    __ j(not_equal, &trampoline);
  }

  // 7. Jump (tail-call) to the code in register rdx without checking arguments.
  ParameterCount expected(0);
  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION);
}


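// Implements the apply path (Function.prototype.apply): after checking that
// unrolling the arguments array will not overflow the stack, the receiver is
// converted to an object (or replaced by the global receiver), each element
// of the arguments array is pushed using a keyed load, and the function is
// invoked with the resulting argument count.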
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  // Stack at entry:
  //   rsp     : return address
  //   rsp + 8 : arguments
  //   rsp + 16: receiver ("this")
  //   rsp + 24: function
  __ EnterInternalFrame();
  // Stack frame:
  //   rbp    : Old base pointer
  //   rbp[1] : return address
  //   rbp[2] : function arguments
  //   rbp[3] : receiver
  //   rbp[4] : function
  static const int kArgumentsOffset = 2 * kPointerSize;
  static const int kReceiverOffset = 3 * kPointerSize;
  static const int kFunctionOffset = 4 * kPointerSize;
  __ push(Operand(rbp, kFunctionOffset));
  __ push(Operand(rbp, kArgumentsOffset));
  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movq(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subq(rcx, kScratchRegister);
  // Make rdx the space we need for the array when it is unrolled onto the
  // stack.
  __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
  // Check if the arguments will overflow the stack.
  __ cmpq(rcx, rdx);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ push(Operand(rbp, kFunctionOffset));
  __ push(rax);
  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
  __ bind(&okay);
  // End of stack check.

  // Push current index and limit.
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
  const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
  __ push(rax);  // limit
  __ push(Immediate(0));  // index

  // Change context eagerly to get the right global object if
  // necessary.
  __ movq(rdi, Operand(rbp, kFunctionOffset));
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Compute the receiver.
  Label call_to_object, use_global_receiver, push_receiver;
  __ movq(rbx, Operand(rbp, kReceiverOffset));
  __ JumpIfSmi(rbx, &call_to_object);
  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
  __ j(equal, &use_global_receiver);
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &use_global_receiver);

  // If the given receiver is already a JavaScript object then there's no
  // reason for converting it.
  __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(below, &call_to_object);
  __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
  __ j(below_equal, &push_receiver);

  // Convert the receiver to an object.
  __ bind(&call_to_object);
  __ push(rbx);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ movq(rbx, rax);
  __ jmp(&push_receiver);

  // Use the current global receiver object as the receiver.
  __ bind(&use_global_receiver);
  const int kGlobalOffset =
      Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
  __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

  // Push the receiver.
  __ bind(&push_receiver);
  __ push(rbx);

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ movq(rax, Operand(rbp, kIndexOffset));
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(rcx, Operand(rbp, kArgumentsOffset));  // Load the arguments object.
  __ push(rcx);
  __ push(rax);

  // Use inline caching to speed up access to arguments.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  // It is important that we do not have a test instruction after the
  // call. A test instruction after the call is used to indicate that
  // we have generated an inline version of the keyed load. In this
  // case, we know that we are not generating a test instruction next.

  // Remove the IC arguments from the stack and push the nth argument.
  __ addq(rsp, Immediate(2 * kPointerSize));
  __ push(rax);

  // Update the index on the stack and in register rax.
  __ movq(rax, Operand(rbp, kIndexOffset));
  __ SmiAddConstant(rax, rax, Smi::FromInt(1));
  __ movq(Operand(rbp, kIndexOffset), rax);

  __ bind(&entry);
  __ cmpq(rax, Operand(rbp, kLimitOffset));
  __ j(not_equal, &loop);

  // Invoke the function.
  ParameterCount actual(rax);
  __ SmiToInteger32(rax, rax);
  __ movq(rdi, Operand(rbp, kFunctionOffset));
  __ InvokeFunction(rdi, actual, CALL_FUNCTION);

  __ LeaveInternalFrame();
  __ ret(3 * kPointerSize);  // Remove function, receiver, and arguments.
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the global context.
  __ movq(result, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(result, FieldOperand(result, GlobalObject::kGlobalContextOffset));
  // Load the Array function from the global context.
  __ movq(result,
          Operand(result, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


// Number of empty elements to allocate for an empty array.
static const int kPreallocatedArrayElements = 4;


// Allocate an empty JSArray. The allocated array is put into the result
// register. If the parameter initial_capacity is larger than zero an elements
// backing store is allocated with this size and filled with the hole values.
// Otherwise the elements backing store is set to the empty FixedArray.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 int initial_capacity,
                                 Label* gc_required) {
  ASSERT(initial_capacity >= 0);

  // Load the initial map from the array function.
  __ movq(scratch1, FieldOperand(array_function,
                                 JSFunction::kPrototypeOrInitialMapOffset));

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.
  int size = JSArray::kSize;
  if (initial_capacity > 0) {
    size += FixedArray::SizeFor(initial_capacity);
  }
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
  __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
          Factory::empty_fixed_array());
  // Field JSArray::kElementsOffset is initialized later.
  __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));

  // If no storage is requested for the elements array just set the empty
  // fixed array.
  if (initial_capacity == 0) {
    __ Move(FieldOperand(result, JSArray::kElementsOffset),
            Factory::empty_fixed_array());
    return;
  }

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // scratch2: start of next object
  __ lea(scratch1, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), scratch1);

  // Initialize the FixedArray and fill it with holes. FixedArray length is not
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array
  // scratch2: start of next object
  __ Move(FieldOperand(scratch1, JSObject::kMapOffset),
          Factory::fixed_array_map());
  __ movq(FieldOperand(scratch1, Array::kLengthOffset),
          Immediate(initial_capacity));

  // Fill the FixedArray with the hole value. Inline the code if short.
  // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
  static const int kLoopUnfoldLimit = 4;
  ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
  __ Move(scratch3, Factory::the_hole_value());
  if (initial_capacity <= kLoopUnfoldLimit) {
    // Use a scratch register here to have only one reloc info when unfolding
    // the loop.
    for (int i = 0; i < initial_capacity; i++) {
      __ movq(FieldOperand(scratch1,
                           FixedArray::kHeaderSize + i * kPointerSize),
              scratch3);
    }
  } else {
    Label loop, entry;
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(scratch1, 0), scratch3);
    __ addq(scratch1, Immediate(kPointerSize));
    __ bind(&entry);
    __ cmpq(scratch1, scratch2);
    __ j(below, &loop);
  }
}


// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register, and the beginning and end of the FixedArray
// elements storage are put into registers elements_array and
// elements_array_end (see below for when that is not the case). If the
// parameter fill_with_hole is true the allocated elements backing store is
// filled with the hole values, otherwise it is left uninitialized. When the
// backing store is filled the register elements_array is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi.
                            Register result,
                            Register elements_array,
                            Register elements_array_end,
                            Register scratch,
                            bool fill_with_hole,
                            Label* gc_required) {
  Label not_empty, allocated;

  // Load the initial map from the array function.
  __ movq(elements_array,
          FieldOperand(array_function,
                       JSFunction::kPrototypeOrInitialMapOffset));

  // Check whether an empty sized array is requested.
  __ testq(array_size, array_size);
  __ j(not_zero, &not_empty);

  // If an empty array is requested allocate a small elements array anyway.
  // This keeps the code below free of special casing for the empty array.
  int size = JSArray::kSize + FixedArray::SizeFor(kPreallocatedArrayElements);
  __ AllocateInNewSpace(size,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);
  __ jmp(&allocated);

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested elements.
  __ bind(&not_empty);
  ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
                        times_half_pointer_size,  // array_size is a smi.
                        array_size,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array: initial map
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ bind(&allocated);
  __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
  __ Move(elements_array, Factory::empty_fixed_array());
  __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
  // Field JSArray::kElementsOffset is initialized later.
  __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ lea(elements_array, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), elements_array);

  // Initialize the fixed array. FixedArray length is not stored as a smi.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  ASSERT(kSmiTag == 0);
  __ SmiToInteger64(array_size, array_size);
  __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
          Factory::fixed_array_map());
  Label not_empty_2, fill_array;
  __ testq(array_size, array_size);
  __ j(not_zero, &not_empty_2);
  // Length of the FixedArray is the number of pre-allocated elements even
  // though the actual JSArray has length 0.
  __ movq(FieldOperand(elements_array, Array::kLengthOffset),
          Immediate(kPreallocatedArrayElements));
  __ jmp(&fill_array);
  __ bind(&not_empty_2);
  // For non-empty JSArrays the length of the FixedArray and the JSArray is the
  // same.
  __ movq(FieldOperand(elements_array, Array::kLengthOffset), array_size);

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  __ bind(&fill_array);
  if (fill_with_hole) {
    Label loop, entry;
    __ Move(scratch, Factory::the_hole_value());
    __ lea(elements_array, Operand(elements_array,
                                   FixedArray::kHeaderSize - kHeapObjectTag));
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(elements_array, 0), scratch);
    __ addq(elements_array, Immediate(kPointerSize));
    __ bind(&entry);
    __ cmpq(elements_array, elements_array_end);
    __ j(below, &loop);
  }
}


// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code the runtime is called.
// This function assumes the following state:
//   rdi: constructor (built-in Array function)
//   rax: argc
//   rsp[0]: return address
//   rsp[8]: last argument
// This function is used for both construct and normal calls of Array. The only
// difference between handling a construct call and a normal call is that for a
// construct call the constructor function in rdi needs to be preserved for
// entering the generic code. In both cases argc in rax needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// a construct call and a normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label* call_generic_code) {
  Label argc_one_or_more, argc_two_or_more;

  // Check for array construction with zero arguments.
  __ testq(rax, rax);
  __ j(not_zero, &argc_one_or_more);

  // Handle construction of an empty array.
  AllocateEmptyJSArray(masm,
                       rdi,
                       rbx,
                       rcx,
                       rdx,
                       r8,
                       kPreallocatedArrayElements,
                       call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);
  __ movq(rax, rbx);
  __ ret(kPointerSize);

  // Check for one argument. Bail out if the argument is not a smi or if it is
  // negative.
  __ bind(&argc_one_or_more);
  __ cmpq(rax, Immediate(1));
  __ j(not_equal, &argc_two_or_more);
  __ movq(rdx, Operand(rsp, kPointerSize));  // Get the argument from the stack.
  __ JumpIfNotPositiveSmi(rdx, call_generic_code);

  // Handle construction of an empty array of a certain size. Bail out if size
  // is too large to actually allocate an elements array.
  __ SmiCompare(rdx, Smi::FromInt(JSObject::kInitialMaxFastElementArray));
  __ j(greater_equal, call_generic_code);

  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0]: return address
  // rsp[8]: argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  true,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);
  __ movq(rax, rbx);
  __ ret(2 * kPointerSize);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ movq(rdx, rax);
  __ Integer32ToSmi(rdx, rdx);  // Convert argc to a smi.
  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0]: return address
  // rsp[8]: last argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  false,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);

  // rax: argc
  // rbx: JSArray
  // rcx: elements_array
  // r8: elements_array_end (untagged)
  // rsp[0]: return address
  // rsp[8]: last argument

  // Location of the last argument.
  __ lea(r9, Operand(rsp, kPointerSize));

  // Location of the first array element (parameter fill_with_hole to
  // AllocateJSArray is false, so the FixedArray is returned in rcx).
  __ lea(rdx, Operand(rcx, FixedArray::kHeaderSize - kHeapObjectTag));

  // rax: argc
  // rbx: JSArray
  // rdx: location of the first array element
  // r9: location of the last argument
  // rsp[0]: return address
  // rsp[8]: last argument
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(r9, rcx, times_pointer_size, 0));
  __ movq(Operand(rdx, 0), kScratchRegister);
  __ addq(rdx, Immediate(kPointerSize));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Remove caller arguments from the stack and return.
  // rax: argc
  // rbx: JSArray
  // rsp[0]: return address
  // rsp[8]: last argument
  __ pop(rcx);
  __ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
  __ push(rcx);
  __ movq(rax, rbx);
  __ ret(0);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code in case the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);
  Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
  Handle<Code> array_code(code);
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin Array function,
    // which always has a map.
    GenerateLoadArrayFunction(masm, rbx);
    __ cmpq(rdi, rbx);
    __ Check(equal, "Unexpected Array function");
    // Initial map for the builtin Array function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
  Handle<Code> generic_construct_stub(code);
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  // -----------------------------------

  Label non_function_call;
  // Check that the function is not a smi.
  __ JumpIfSmi(rdi, &non_function_call);
  // Check that the function is a JSFunction.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &non_function_call);

  // Jump to the function-specific construct stub.
  __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset));
  __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize));
  __ jmp(rbx);

  // rdi: called object
  // rax: number of arguments
  __ bind(&non_function_call);
  // Set expected number of arguments to zero (not changing rax).
  __ movq(rbx, Immediate(0));
  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
          RelocInfo::CODE_TARGET);
}


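// Shared code for the generic and API construct stubs. Allocates the new
// receiver object (inline when FLAG_inline_new allows it, otherwise through
// Runtime::kNewObject), pushes the receiver and arguments, invokes the
// constructor, and returns either the constructor's result (if it is an
// object) or the allocated receiver, per ECMA-262 section 13.2.2.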
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function) {
  // Enter a construct frame.
  __ EnterConstructFrame();

  // Store a smi-tagged arguments count on the stack.
  __ Integer32ToSmi(rax, rax);
  __ push(rax);

  // Push the function to invoke on the stack.
  __ push(rdi);

  // Try to allocate the object without transitioning into C code. If any of
  // the preconditions is not met, the code bails out to the runtime call.
  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;

#ifdef ENABLE_DEBUGGER_SUPPORT
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address();
    __ movq(kScratchRegister, debug_step_in_fp);
    __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
    __ j(not_equal, &rt_call);
#endif

    // Verified that the constructor is a JSFunction.
    // Load the initial map and verify that it is in fact a map.
    // rdi: constructor
    __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    __ JumpIfSmi(rax, &rt_call);
    // rdi: constructor
    // rax: initial map (if proven valid below)
    __ CmpObjectType(rax, MAP_TYPE, rbx);
    __ j(not_equal, &rt_call);

    // Check that the constructor is not constructing a JSFunction (see
    // comments in Runtime_NewObject in runtime.cc), in which case the initial
    // map's instance type would be JS_FUNCTION_TYPE.
    // rdi: constructor
    // rax: initial map
    __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
    __ j(equal, &rt_call);

    // Now allocate the JSObject on the heap.
    __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
    __ shl(rdi, Immediate(kPointerSizeLog2));
    // rdi: size of new object
    __ AllocateInNewSpace(rdi,
                          rbx,
                          rdi,
                          no_reg,
                          &rt_call,
                          NO_ALLOCATION_FLAGS);
    // Allocated the JSObject, now initialize the fields.
    // rax: initial map
    // rbx: JSObject (not HeapObject tagged - the actual address).
    // rdi: start of next object
    __ movq(Operand(rbx, JSObject::kMapOffset), rax);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
    __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
    // Set extra fields in the newly allocated object.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    { Label loop, entry;
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ jmp(&entry);
      __ bind(&loop);
      __ movq(Operand(rcx, 0), rdx);
      __ addq(rcx, Immediate(kPointerSize));
      __ bind(&entry);
      __ cmpq(rcx, rdi);
      __ j(less, &loop);
    }

    // Add the object tag to make the JSObject real, so that we can continue
    // and jump into the continuation code at any time from now on. Any
    // failures need to undo the allocation, so that the heap is in a
    // consistent state and verifiable.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    __ or_(rbx, Immediate(kHeapObjectTag));

    // Check if a non-empty properties array is needed.
    // Allocate and initialize a FixedArray if it is.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    // Calculate the total number of properties described by the map.
    __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
    __ movzxbq(rcx, FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
    __ addq(rdx, rcx);
    // Calculate unused properties past the end of the in-object properties.
    __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
    __ subq(rdx, rcx);
    // Done if no extra properties are to be allocated.
    __ j(zero, &allocated);
    __ Assert(positive, "Property allocation count failed.");

    // Scale the number of elements by pointer size and add the header for
    // FixedArrays to the start of the next object calculation from above.
    // rbx: JSObject
    // rdi: start of next object (will be start of FixedArray)
    // rdx: number of elements in properties array
    __ AllocateInNewSpace(FixedArray::kHeaderSize,
                          times_pointer_size,
                          rdx,
                          rdi,
                          rax,
                          no_reg,
                          &undo_allocation,
                          RESULT_CONTAINS_TOP);

    // Initialize the FixedArray.
    // rbx: JSObject
    // rdi: FixedArray
    // rdx: number of elements
    // rax: start of next object
    __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
    __ movq(Operand(rdi, JSObject::kMapOffset), rcx);  // Set up the map.
    __ movl(Operand(rdi, FixedArray::kLengthOffset), rdx);  // And the length.

    // Initialize the fields to undefined.
    // rbx: JSObject
    // rdi: FixedArray
    // rax: start of next object
    // rdx: number of elements
    { Label loop, entry;
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
      __ jmp(&entry);
      __ bind(&loop);
      __ movq(Operand(rcx, 0), rdx);
      __ addq(rcx, Immediate(kPointerSize));
      __ bind(&entry);
      __ cmpq(rcx, rax);
      __ j(below, &loop);
    }

    // Store the initialized FixedArray into the properties field of
    // the JSObject.
    // rbx: JSObject
    // rdi: FixedArray
    __ or_(rdi, Immediate(kHeapObjectTag));  // Add the heap tag.
    __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);


    // Continue with JSObject being successfully allocated.
    // rbx: JSObject
    __ jmp(&allocated);

    // Undo the setting of the new top so that the heap is verifiable. For
    // example, the map's unused properties potentially do not match the
    // allocated object's unused properties.
    // rbx: JSObject (previous new top)
    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(rbx);
  }

  // Allocate the new receiver object using the runtime call.
  // rdi: function (constructor)
  __ bind(&rt_call);
  // Must restore rdi (constructor) before calling runtime.
  __ movq(rdi, Operand(rsp, 0));
  __ push(rdi);
  __ CallRuntime(Runtime::kNewObject, 1);
  __ movq(rbx, rax);  // Store the result in rbx.

  // New object allocated.
  // rbx: newly allocated object
  __ bind(&allocated);
  // Retrieve the function from the stack.
  __ pop(rdi);

  // Retrieve smi-tagged arguments count from the stack.
  __ movq(rax, Operand(rsp, 0));
  __ SmiToInteger32(rax, rax);

  // Push the allocated receiver to the stack. We need two copies
  // because we may have to return the original one and the calling
  // conventions dictate that the called function pops the receiver.
  __ push(rbx);
  __ push(rbx);

  // Setup pointer to last argument.
  __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

  // Copy arguments and receiver to the expression stack.
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ push(Operand(rbx, rcx, times_pointer_size, 0));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Call the function.
  if (is_api_function) {
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
    Handle<Code> code = Handle<Code>(
        Builtins::builtin(Builtins::HandleApiCallConstruct));
    ParameterCount expected(0);
    __ InvokeCode(code, expected, expected,
                  RelocInfo::CODE_TARGET, CALL_FUNCTION);
  } else {
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION);
  }

  // Restore context from the frame.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

  // If the result is an object (in the ECMA sense), we should get rid
  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
  // on page 74.
  Label use_receiver, exit;
  // If the result is a smi, it is *not* an object in the ECMA sense.
  __ JumpIfSmi(rax, &use_receiver);

  // If the type of the result (stored in its map) is less than
  // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
  __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(above_equal, &exit);

  // Throw away the result of the constructor invocation and use the
  // on-stack receiver as the result.
  __ bind(&use_receiver);
  __ movq(rax, Operand(rsp, 0));

  // Restore the arguments count and leave the construct frame.
  __ bind(&exit);
  __ movq(rbx, Operand(rsp, kPointerSize));  // Get the arguments count.
  __ LeaveConstructFrame();

  // Remove caller arguments from the stack and return.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
  __ IncrementCounter(&Counters::constructed_objects, 1);
  __ ret(0);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true);
}


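// Shared code for the JS entry trampolines. Translates the C++ entry
// parameters (which arrive according to the platform calling convention,
// Win64 or System V) into the JS calling convention, pushes the arguments
// from the argv handle array onto the stack, and then invokes either the
// function itself or the JSConstructCall builtin depending on is_construct.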
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Platform specific argument handling. After this, the stack contains
  // an internal frame and the pushed function and receiver, registers
  // rax and rbx hold the argument count and argument array, while rdi
  // holds the function pointer and rsi the context.
#ifdef _WIN64
  // MSVC parameters in:
  // rcx : entry (ignored)
  // rdx : function
  // r8 : receiver
  // r9 : argc
  // [rsp+0x20] : argv

  // Clear the context before we push it when entering the JS frame.
  __ xor_(rsi, rsi);
  __ EnterInternalFrame();

  // Load the function context into rsi.
  __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

  // Push the function and the receiver onto the stack.
  __ push(rdx);
  __ push(r8);

  // Load the number of arguments and setup pointer to the arguments.
  __ movq(rax, r9);
  // Load the previous frame pointer to access C arguments on the stack.
  __ movq(kScratchRegister, Operand(rbp, 0));
  __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
  // Load the function pointer into rdi.
  __ movq(rdi, rdx);
#else  // !defined(_WIN64)
  // GCC parameters in:
  // rdi : entry (ignored)
  // rsi : function
  // rdx : receiver
  // rcx : argc
  // r8  : argv

  __ movq(rdi, rsi);
  // rdi : function

  // Clear the context before we push it when entering the JS frame.
  __ xor_(rsi, rsi);
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push the function and receiver and setup the context.
  __ push(rdi);
  __ push(rdx);
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Load the number of arguments and setup pointer to the arguments.
  __ movq(rax, rcx);
  __ movq(rbx, r8);
#endif  // _WIN64

  // Set up the roots register.
  ExternalReference roots_address = ExternalReference::roots_address();
  __ movq(r13, roots_address);

  // Current stack contents:
  // [rsp + 2 * kPointerSize ... ] : Internal frame
  // [rsp + kPointerSize]          : function
  // [rsp]                         : receiver
  // Current register contents:
  // rax : argc
  // rbx : argv
  // rsi : context
  // rdi : function

  // Copy arguments to the stack in a loop.
  // Register rbx points to array of pointers to handle locations.
  // Push the values of these handles.
  Label loop, entry;
  __ xor_(rcx, rcx);  // Set loop variable to 0.
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
  __ push(Operand(kScratchRegister, 0));  // Dereference the handle.
  __ addq(rcx, Immediate(1));
  __ bind(&entry);
  __ cmpq(rcx, rax);
  __ j(not_equal, &loop);

  // Invoke the code.
  if (is_construct) {
    // Expects rdi to hold function pointer.
    __ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
            RelocInfo::CODE_TARGET);
  } else {
    ParameterCount actual(rax);
    // Function must be in rdi.
    __ InvokeFunction(rdi, actual, CALL_FUNCTION);
  }

  // Exit the JS frame. Notice that this also removes the empty
  // context and the function left on the stack by the code
  // invocation.
  __ LeaveInternalFrame();
  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

} }  // namespace v8::internal