// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29#include "codegen-inl.h"
30#include "macro-assembler.h"
31
32namespace v8 {
33namespace internal {
34
35#define __ ACCESS_MASM(masm)
36
// Generates the adaptor that bridges a JS call into the C++ builtin
// identified by `id`: stashes the callee function (rdi) in a per-VM
// external reference, fixes up the argument count, and tail-calls the
// runtime entry.
void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
  // TODO(428): Don't pass the function in a static variable.
  // Store the callee JSFunction (rdi) into the builtin_passed_function
  // slot so the C++ builtin can retrieve it.
  ExternalReference passed = ExternalReference::builtin_passed_function();
  __ movq(kScratchRegister, passed.address(), RelocInfo::EXTERNAL_REFERENCE);
  __ movq(Operand(kScratchRegister, 0), rdi);

  // The actual argument count has already been loaded into register
  // rax, but JumpToRuntime expects rax to contain the number of
  // arguments including the receiver.
  __ incq(rax);
  __ JumpToRuntime(ExternalReference(id), 1);
}
49
50
// Builds an arguments-adaptor frame on top of the current stack:
// saved rbp, ARGUMENTS_ADAPTOR sentinel, callee function, and the
// smi-tagged actual argument count. Mirrors LeaveArgumentsAdaptorFrame.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(rbp);
  __ movq(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ push(rdi);

  // Preserve the number of arguments on the stack. Must preserve both
  // rax and rbx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(rcx, rax);
  __ push(rcx);
}
67
68
// Tears down the frame built by EnterArgumentsAdaptorFrame and removes
// the caller's arguments (plus receiver) from the stack, keeping the
// return address on top.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movq(rsp, rbp);
  __ pop(rbp);

  // Remove caller arguments from the stack.
  // Pop the return address, drop argc slots plus one for the receiver,
  // then push the return address back.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
}
83
84
// Adapts the actual argument count (rax) to the callee's expected count
// (rbx) before jumping to the code object in rdx. When too few arguments
// are passed the missing slots are filled with undefined; when the callee
// uses the don't-adapt sentinel, control jumps straight to the code.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdx : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  __ IncrementCounter(&Counters::arguments_adaptors, 1);

  Label enough, too_few;
  __ cmpq(rax, rbx);
  __ j(less, &too_few);
  __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    // rax is repointed at the highest caller argument (receiver) and
    // walked downward, pushing each slot onto the adaptor frame.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ movq(rcx, Immediate(-1));  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(rcx);
    __ push(Operand(rax, 0));
    __ subq(rax, Immediate(kPointerSize));
    __ cmpq(rcx, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ movq(rcx, Immediate(-1));  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(rcx);
    __ push(Operand(rdi, 0));
    __ subq(rdi, Immediate(kPointerSize));
    __ cmpq(rcx, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incq(rcx);
    __ push(kScratchRegister);
    __ cmpq(rcx, rbx);
    __ j(less, &fill);

    // Restore function pointer (rdi was clobbered by the copy loop above).
    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);
}
164
165
// Generates the Function.prototype.call builtin: normalizes the
// argument list (ensuring a receiver exists and is an object), shifts
// the stack so the callee sees a conventional frame, and tail-calls the
// target — or CALL_NON_FUNCTION when the callee is not a JSFunction.
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Stack Layout:
  //   rsp  : return address
  //   +1   : Argument n
  //   +2   : Argument n-1
  //   ...
  //   +n   : Argument 1 = receiver
  //   +n+1 : Argument 0 = function to call
  //
  // rax contains the number of arguments, n, not counting the function.
  //
  // 1. Make sure we have at least one argument.
  { Label done;
    __ testq(rax, rax);
    __ j(not_zero, &done);
    // Zero arguments: temporarily pop the return address, push
    // undefined as a receiver, and restore the return address.
    __ pop(rbx);
    __ Push(Factory::undefined_value());
    __ push(rbx);
    __ incq(rax);
    __ bind(&done);
  }

  // 2. Get the function to call from the stack.
  { Label done, non_function, function;
    // The function to call is at position n+1 on the stack.
    __ movq(rdi, Operand(rsp, rax, times_pointer_size, +1 * kPointerSize));
    __ JumpIfSmi(rdi, &non_function);
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(equal, &function);

    // Non-function called: Clear the function to force exception.
    __ bind(&non_function);
    __ xor_(rdi, rdi);
    __ jmp(&done);

    // Function called: Change context eagerly to get the right global object.
    __ bind(&function);
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    __ bind(&done);
  }

  // 3. Make sure first argument is an object; convert if necessary.
  { Label call_to_object, use_global_receiver, patch_receiver, done;
    __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));

    __ JumpIfSmi(rbx, &call_to_object);

    // null and undefined receivers are replaced by the global receiver.
    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    // Already a JS object? Then no conversion is needed.
    __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
    __ j(below, &call_to_object);
    __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
    __ j(below_equal, &done);

    __ bind(&call_to_object);
    __ EnterInternalFrame();  // preserves rax, rbx, rdi

    // Store the arguments count on the stack (smi tagged).
    __ Integer32ToSmi(rax, rax);
    __ push(rax);

    __ push(rdi);  // save rdi across the call
    __ push(rbx);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ movq(rbx, rax);
    __ pop(rdi);  // restore rdi after the call

    // Get the arguments count and untag it.
    __ pop(rax);
    __ SmiToInteger32(rax, rax);

    __ LeaveInternalFrame();
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx);

    __ bind(&done);
  }

  // 4. Shift stuff one slot down the stack (overwriting the function
  //    slot, which is no longer needed on the stack).
  { Label loop;
    __ lea(rcx, Operand(rax, +1));  // +1 ~ copy receiver too
    __ bind(&loop);
    __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
    __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
    __ decq(rcx);
    __ j(not_zero, &loop);
  }

  // 5. Remove TOS (copy of last argument), but keep return address.
  __ pop(rbx);
  __ pop(rcx);
  __ push(rbx);
  __ decq(rax);

  // 6. Check that function really was a function and get the code to
  //    call from the function and check that the number of expected
  //    arguments matches what we're providing.
  { Label invoke, trampoline;
    __ testq(rdi, rdi);
    __ j(not_zero, &invoke);
    // rdi was cleared in step 2: dispatch CALL_NON_FUNCTION through the
    // adaptor with zero expected arguments so it can raise the error.
    __ xor_(rbx, rbx);
    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    __ bind(&trampoline);
    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
            RelocInfo::CODE_TARGET);

    __ bind(&invoke);
    __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movsxlq(rbx,
        FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
    __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
    __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
    __ cmpq(rax, rbx);
    __ j(not_equal, &trampoline);
  }

  // 7. Jump (tail-call) to the code in register rdx without checking arguments.
  ParameterCount expected(0);
  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION);
}
299
300
// Generates the Function.prototype.apply builtin: validates the argument
// array (APPLY_PREPARE), checks there is enough stack for the unrolled
// arguments, normalizes the receiver, pushes each array element onto the
// stack via the keyed-load IC, and invokes the function.
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  // Stack at entry:
  //   rsp    : return address
  //   rsp+8  : arguments
  //   rsp+16 : receiver ("this")
  //   rsp+24 : function
  __ EnterInternalFrame();
  // Stack frame:
  //   rbp    : Old base pointer
  //   rbp[1] : return address
  //   rbp[2] : function arguments
  //   rbp[3] : receiver
  //   rbp[4] : function
  static const int kArgumentsOffset = 2 * kPointerSize;
  static const int kReceiverOffset = 3 * kPointerSize;
  static const int kFunctionOffset = 4 * kPointerSize;
  __ push(Operand(rbp, kFunctionOffset));
  __ push(Operand(rbp, kArgumentsOffset));
  // APPLY_PREPARE validates the arguments object and returns its length
  // (as a smi) in rax.
  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

  if (FLAG_check_stack) {
    // We need to catch preemptions right here, otherwise an unlucky preemption
    // could show up as a failed apply.
    Label retry_preemption;
    Label no_preemption;
    __ bind(&retry_preemption);
    ExternalReference stack_guard_limit =
        ExternalReference::address_of_stack_guard_limit();
    __ movq(kScratchRegister, stack_guard_limit);
    __ movq(rcx, rsp);
    __ subq(rcx, Operand(kScratchRegister, 0));
    // rcx contains the difference between the stack limit and the stack top.
    // We use it below to check that there is enough room for the arguments.
    __ j(above, &no_preemption);

    // Preemption!
    // Because runtime functions always remove the receiver from the stack, we
    // have to fake one to avoid underflowing the stack.
    __ push(rax);
    __ Push(Smi::FromInt(0));

    // Do call to runtime routine.
    __ CallRuntime(Runtime::kStackGuard, 1);
    __ pop(rax);
    __ jmp(&retry_preemption);

    __ bind(&no_preemption);

    Label okay;
    // Make rdx the space we need for the array when it is unrolled onto the
    // stack.
    __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
    __ cmpq(rcx, rdx);
    __ j(greater, &okay);

    // Too bad: Out of stack space.
    __ push(Operand(rbp, kFunctionOffset));
    __ push(rax);
    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
    __ bind(&okay);
  }

  // Push current index and limit, addressed below via the expression-stack
  // slots kIndexOffset/kLimitOffset of this internal frame.
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
  const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
  __ push(rax);  // limit
  __ push(Immediate(0));  // index

  // Change context eagerly to get the right global object if
  // necessary.
  __ movq(rdi, Operand(rbp, kFunctionOffset));
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Compute the receiver.
  Label call_to_object, use_global_receiver, push_receiver;
  __ movq(rbx, Operand(rbp, kReceiverOffset));
  __ JumpIfSmi(rbx, &call_to_object);
  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
  __ j(equal, &use_global_receiver);
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &use_global_receiver);

  // If given receiver is already a JavaScript object then there's no
  // reason for converting it.
  __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(below, &call_to_object);
  __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
  __ j(below_equal, &push_receiver);

  // Convert the receiver to an object.
  __ bind(&call_to_object);
  __ push(rbx);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ movq(rbx, rax);
  __ jmp(&push_receiver);

  // Use the current global receiver object as the receiver.
  __ bind(&use_global_receiver);
  const int kGlobalOffset =
      Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

  // Push the receiver.
  __ bind(&push_receiver);
  __ push(rbx);

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ movq(rax, Operand(rbp, kIndexOffset));
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(rcx, Operand(rbp, kArgumentsOffset));  // load arguments
  __ push(rcx);
  __ push(rax);

  // Use inline caching to speed up access to arguments.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  // It is important that we do not have a test instruction after the
  // call. A test instruction after the call is used to indicate that
  // we have generated an inline version of the keyed load. In this
  // case, we know that we are not generating a test instruction next.

  // Remove IC arguments from the stack and push the nth argument.
  __ addq(rsp, Immediate(2 * kPointerSize));
  __ push(rax);

  // Update the index on the stack and in register rax (index and limit
  // are smis, so SmiAddConstant advances by one element).
  __ movq(rax, Operand(rbp, kIndexOffset));
  __ SmiAddConstant(rax, rax, Smi::FromInt(1));
  __ movq(Operand(rbp, kIndexOffset), rax);

  __ bind(&entry);
  __ cmpq(rax, Operand(rbp, kLimitOffset));
  __ j(not_equal, &loop);

  // Invoke the function.
  ParameterCount actual(rax);
  __ SmiToInteger32(rax, rax);
  __ movq(rdi, Operand(rbp, kFunctionOffset));
  __ InvokeFunction(rdi, actual, CALL_FUNCTION);

  __ LeaveInternalFrame();
  __ ret(3 * kPointerSize);  // remove function, receiver, and arguments
}
448
449
// Load the built-in Array function from the current context (held in rsi)
// into `result`.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the global context.
  __ movq(result, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(result, FieldOperand(result, GlobalObject::kGlobalContextOffset));
  // Load the Array function from the global context.
  __ movq(result,
          Operand(result, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}
459
460
// Number of (hole-initialized) elements to allocate in the backing store
// when constructing an empty array. See AllocateEmptyJSArray/AllocateJSArray.
static const int kPreallocatedArrayElements = 4;
463
464
// Allocate an empty JSArray. The allocated array is put into the result
// register. If the parameter initial_capacity is larger than zero an elements
// backing store is allocated with this size and filled with the hole values.
// Otherwise the elements backing store is set to the empty FixedArray.
// Bails out to gc_required if new-space allocation fails.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 int initial_capacity,
                                 Label* gc_required) {
  ASSERT(initial_capacity >= 0);

  // Load the initial map from the array function.
  __ movq(scratch1, FieldOperand(array_function,
                                 JSFunction::kPrototypeOrInitialMapOffset));

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.
  int size = JSArray::kSize;
  if (initial_capacity > 0) {
    size += FixedArray::SizeFor(initial_capacity);
  }
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
  __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
          Factory::empty_fixed_array());
  // Field JSArray::kElementsOffset is initialized later.
  __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));

  // If no storage is requested for the elements array just set the empty
  // fixed array.
  if (initial_capacity == 0) {
    __ Move(FieldOperand(result, JSArray::kElementsOffset),
            Factory::empty_fixed_array());
    return;
  }

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // scratch2: start of next object
  __ lea(scratch1, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), scratch1);

  // Initialize the FixedArray and fill it with holes. FixedArray length is not
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array
  // scratch2: start of next object
  __ Move(FieldOperand(scratch1, JSObject::kMapOffset),
          Factory::fixed_array_map());
  __ movq(FieldOperand(scratch1, Array::kLengthOffset),
          Immediate(initial_capacity));

  // Fill the FixedArray with the hole value. Inline the code if short.
  // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
  static const int kLoopUnfoldLimit = 4;
  ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
  __ Move(scratch3, Factory::the_hole_value());
  if (initial_capacity <= kLoopUnfoldLimit) {
    // Use a scratch register here to have only one reloc info when unfolding
    // the loop.
    for (int i = 0; i < initial_capacity; i++) {
      __ movq(FieldOperand(scratch1,
                           FixedArray::kHeaderSize + i * kPointerSize),
              scratch3);
    }
  } else {
    // NOTE(review): this loop starts storing at Operand(scratch1, 0), i.e.
    // at the (tagged) FixedArray start rather than past its header as the
    // unrolled path does — looks like it would clobber the map/length words.
    // Callers currently only pass kPreallocatedArrayElements, so this branch
    // appears unreachable; confirm before relying on it.
    Label loop, entry;
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(scratch1, 0), scratch3);
    __ addq(scratch1, Immediate(kPointerSize));
    __ bind(&entry);
    __ cmpq(scratch1, scratch2);
    __ j(below, &loop);
  }
}
556
557
// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register and beginning and end of the FixedArray elements
// storage is put into registers elements_array and elements_array_end (see
// below for when that is not the case). If the parameter fill_with_holes is
// true the allocated elements backing store is filled with the hole values
// otherwise it is left uninitialized. When the backing store is filled the
// register elements_array is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi.
                            Register result,
                            Register elements_array,
                            Register elements_array_end,
                            Register scratch,
                            bool fill_with_hole,
                            Label* gc_required) {
  Label not_empty, allocated;

  // Load the initial map from the array function.
  __ movq(elements_array,
          FieldOperand(array_function,
                       JSFunction::kPrototypeOrInitialMapOffset));

  // Check whether an empty sized array is requested.
  __ testq(array_size, array_size);
  __ j(not_zero, &not_empty);

  // If an empty array is requested allocate a small elements array anyway. This
  // keeps the code below free of special casing for the empty array.
  int size = JSArray::kSize + FixedArray::SizeFor(kPreallocatedArrayElements);
  __ AllocateInNewSpace(size,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);
  __ jmp(&allocated);

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested elements.
  __ bind(&not_empty);
  ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
                        times_half_pointer_size,  // array_size is a smi.
                        array_size,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array: initial map
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ bind(&allocated);
  __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
  __ Move(elements_array, Factory::empty_fixed_array());
  __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
  // Field JSArray::kElementsOffset is initialized later.
  __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ lea(elements_array, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), elements_array);

  // Initialize the fixed array. FixedArray length is not stored as a smi.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  ASSERT(kSmiTag == 0);
  __ SmiToInteger64(array_size, array_size);
  __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
          Factory::fixed_array_map());
  Label not_empty_2, fill_array;
  __ testq(array_size, array_size);
  __ j(not_zero, &not_empty_2);
  // Length of the FixedArray is the number of pre-allocated elements even
  // though the actual JSArray has length 0.
  __ movq(FieldOperand(elements_array, Array::kLengthOffset),
          Immediate(kPreallocatedArrayElements));
  __ jmp(&fill_array);
  __ bind(&not_empty_2);
  // For non-empty JSArrays the length of the FixedArray and the JSArray is the
  // same.
  __ movq(FieldOperand(elements_array, Array::kLengthOffset), array_size);

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  __ bind(&fill_array);
  if (fill_with_hole) {
    Label loop, entry;
    __ Move(scratch, Factory::the_hole_value());
    // Untag elements_array and skip the FixedArray header so the loop
    // writes only element slots.
    __ lea(elements_array, Operand(elements_array,
                                   FixedArray::kHeaderSize - kHeapObjectTag));
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(elements_array, 0), scratch);
    __ addq(elements_array, Immediate(kPointerSize));
    __ bind(&entry);
    __ cmpq(elements_array, elements_array_end);
    __ j(below, &loop);
  }
}
673
674
// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code the runtime is called. This
// function assumes the following state:
//   rdi: constructor (built-in Array function)
//   rax: argc
//   rsp[0]: return address
//   rsp[8]: last argument
// This function is used for both construct and normal calls of Array. The only
// difference between handling a construct call and a normal call is that for a
// construct call the constructor function in rdi needs to be preserved for
// entering the generic code. In both cases argc in rax needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// a construct call and a normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label *call_generic_code) {
  Label argc_one_or_more, argc_two_or_more;

  // Check for array construction with zero arguments.
  __ testq(rax, rax);
  __ j(not_zero, &argc_one_or_more);

  // Handle construction of an empty array.
  AllocateEmptyJSArray(masm,
                       rdi,
                       rbx,
                       rcx,
                       rdx,
                       r8,
                       kPreallocatedArrayElements,
                       call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);
  __ movq(rax, rbx);
  __ ret(kPointerSize);

  // Check for one argument. Bail out if argument is not smi or if it is
  // negative.
  __ bind(&argc_one_or_more);
  __ cmpq(rax, Immediate(1));
  __ j(not_equal, &argc_two_or_more);
  __ movq(rdx, Operand(rsp, kPointerSize));  // Get the argument from the stack.
  __ JumpIfNotPositiveSmi(rdx, call_generic_code);

  // Handle construction of an empty array of a certain size. Bail out if size
  // is too large to actually allocate an elements array.
  __ SmiCompare(rdx, Smi::FromInt(JSObject::kInitialMaxFastElementArray));
  __ j(greater_equal, call_generic_code);

  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0]: return address
  // rsp[8]: argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  true,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);
  __ movq(rax, rbx);
  __ ret(2 * kPointerSize);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ movq(rdx, rax);
  __ Integer32ToSmi(rdx, rdx);  // Convert argc to a smi.
  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0] : return address
  // rsp[8] : last argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  false,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);

  // rax: argc
  // rbx: JSArray
  // rcx: elements_array
  // r8: elements_array_end (untagged)
  // rsp[0]: return address
  // rsp[8]: last argument

  // Location of the last argument
  __ lea(r9, Operand(rsp, kPointerSize));

  // Location of the first array element (parameter fill_with_holes to
  // AllocateJSArray is false, so the FixedArray is returned in rcx).
  __ lea(rdx, Operand(rcx, FixedArray::kHeaderSize - kHeapObjectTag));

  // rax: argc
  // rbx: JSArray
  // rdx: location of the first array element
  // r9: location of the last argument
  // rsp[0]: return address
  // rsp[8]: last argument
  // Copy stack arguments into the elements array. The index runs from
  // argc-1 down to 0; higher stack addresses hold earlier arguments, so
  // the first argument lands in element 0.
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(r9, rcx, times_pointer_size, 0));
  __ movq(Operand(rdx, 0), kScratchRegister);
  __ addq(rdx, Immediate(kPointerSize));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Remove caller arguments from the stack and return.
  // rax: argc
  // rbx: JSArray
  // rsp[0]: return address
  // rsp[8]: last argument
  __ pop(rcx);
  __ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
  __ push(rcx);
  __ movq(rax, rbx);
  __ ret(0);
}
803
804
// Generates the Array builtin for normal (non-construct) calls: tries the
// fast native path and falls back to ArrayCodeGeneric on failure.
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code in case the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);
  Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
  Handle<Code> array_code(code);
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}
837
838
// Generates the Array builtin for construct calls (new Array(...)): tries
// the fast native path and falls back to JSConstructStubGeneric on failure.
void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argc
  //  -- rdi : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin Array function which
    // does always have a map.
    GenerateLoadArrayFunction(masm, rbx);
    __ cmpq(rdi, rbx);
    __ Check(equal, "Unexpected Array function");
    // Initial map for the builtin Array function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
  Handle<Code> generic_construct_stub(code);
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
874
875
// Generates the generic entry point for invoking an object as a constructor.
// If the callee is a JSFunction, control tail-jumps to the callee's
// function-specific construct stub; otherwise it dispatches to the
// CALL_NON_FUNCTION_AS_CONSTRUCTOR builtin through the arguments adaptor.
void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  // -----------------------------------

  Label non_function_call;
  // Check that function is not a smi.
  __ JumpIfSmi(rdi, &non_function_call);
  // Check that function is a JSFunction.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &non_function_call);

  // Jump to the function-specific construct stub.
  __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset));
  // Skip the Code object header to reach the first instruction.
  __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize));
  __ jmp(rbx);

  // rdi: called object
  // rax: number of arguments
  __ bind(&non_function_call);
  // Set expected number of arguments to zero (not changing rax).
  __ movq(rbx, Immediate(0));
  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
          RelocInfo::CODE_TARGET);
}
904
905
// Generates the generic construct stub. Allocates the receiver object —
// inline in new space when FLAG_inline_new permits, otherwise through the
// Runtime::kNewObject call — copies the arguments into the construct frame,
// invokes the constructor, and applies the ECMA-262 section 13.2.2 rule that
// a non-object return value is discarded in favor of the receiver.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // Enter a construct frame.
  __ EnterConstructFrame();

  // Store a smi-tagged arguments count on the stack.
  __ Integer32ToSmi(rax, rax);
  __ push(rax);

  // Push the function to invoke on the stack.
  __ push(rdi);

  // Try to allocate the object without transitioning into C code. If any of the
  // preconditions is not met, the code bails out to the runtime call.
  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;

#ifdef ENABLE_DEBUGGER_SUPPORT
    // While the debugger is stepping into calls, always go through the
    // runtime so the debugger can observe the allocation.
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address();
    __ movq(kScratchRegister, debug_step_in_fp);
    __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
    __ j(not_equal, &rt_call);
#endif

    // Verified that the constructor is a JSFunction.
    // Load the initial map and verify that it is in fact a map.
    // rdi: constructor
    __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // A single smi check rules out both NULL and a smi, since kSmiTag == 0.
    ASSERT(kSmiTag == 0);
    __ JumpIfSmi(rax, &rt_call);
    // rdi: constructor
    // rax: initial map (if proven valid below)
    __ CmpObjectType(rax, MAP_TYPE, rbx);
    __ j(not_equal, &rt_call);

    // Check that the constructor is not constructing a JSFunction (see comments
    // in Runtime_NewObject in runtime.cc). In which case the initial map's
    // instance type would be JS_FUNCTION_TYPE.
    // rdi: constructor
    // rax: initial map
    __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
    __ j(equal, &rt_call);

    // Now allocate the JSObject on the heap.
    // Instance size in the map is in words; scale to bytes.
    __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
    __ shl(rdi, Immediate(kPointerSizeLog2));
    // rdi: size of new object
    __ AllocateInNewSpace(rdi,
                          rbx,
                          rdi,
                          no_reg,
                          &rt_call,
                          NO_ALLOCATION_FLAGS);
    // Allocated the JSObject, now initialize the fields.
    // rax: initial map
    // rbx: JSObject (not HeapObject tagged - the actual address).
    // rdi: start of next object
    __ movq(Operand(rbx, JSObject::kMapOffset), rax);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
    __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
    // Set extra fields in the newly allocated object.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    { Label loop, entry;
      // Fill all in-object property slots with undefined.
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ jmp(&entry);
      __ bind(&loop);
      __ movq(Operand(rcx, 0), rdx);
      __ addq(rcx, Immediate(kPointerSize));
      __ bind(&entry);
      __ cmpq(rcx, rdi);
      __ j(less, &loop);
    }

    // Add the object tag to make the JSObject real, so that we can continue and
    // jump into the continuation code at any time from now on. Any failures
    // need to undo the allocation, so that the heap is in a consistent state
    // and verifiable.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    __ or_(rbx, Immediate(kHeapObjectTag));

    // Check if a non-empty properties array is needed.
    // Allocate and initialize a FixedArray if it is.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    // Calculate total properties described map.
    __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
    __ movzxbq(rcx, FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
    __ addq(rdx, rcx);
    // Calculate unused properties past the end of the in-object properties.
    __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
    __ subq(rdx, rcx);
    // Done if no extra properties are to be allocated.
    // Both branches below consume the flags set by the subq above.
    __ j(zero, &allocated);
    __ Assert(positive, "Property allocation count failed.");

    // Scale the number of elements by pointer size and add the header for
    // FixedArrays to the start of the next object calculation from above.
    // rbx: JSObject
    // rdi: start of next object (will be start of FixedArray)
    // rdx: number of elements in properties array
    __ AllocateInNewSpace(FixedArray::kHeaderSize,
                          times_pointer_size,
                          rdx,
                          rdi,
                          rax,
                          no_reg,
                          &undo_allocation,
                          RESULT_CONTAINS_TOP);

    // Initialize the FixedArray.
    // rbx: JSObject
    // rdi: FixedArray
    // rdx: number of elements
    // rax: start of next object
    __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
    __ movq(Operand(rdi, JSObject::kMapOffset), rcx);  // setup the map
    __ movl(Operand(rdi, FixedArray::kLengthOffset), rdx);  // and length

    // Initialize the fields to undefined.
    // rbx: JSObject
    // rdi: FixedArray
    // rax: start of next object
    // rdx: number of elements
    { Label loop, entry;
      // rdx (element count) is no longer needed; the loop is bounded by rax.
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
      __ jmp(&entry);
      __ bind(&loop);
      __ movq(Operand(rcx, 0), rdx);
      __ addq(rcx, Immediate(kPointerSize));
      __ bind(&entry);
      __ cmpq(rcx, rax);
      __ j(below, &loop);
    }

    // Store the initialized FixedArray into the properties field of
    // the JSObject
    // rbx: JSObject
    // rdi: FixedArray
    __ or_(rdi, Immediate(kHeapObjectTag));  // add the heap tag
    __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);


    // Continue with JSObject being successfully allocated
    // rbx: JSObject
    __ jmp(&allocated);

    // Undo the setting of the new top so that the heap is verifiable. For
    // example, the map's unused properties potentially do not match the
    // allocated objects unused properties.
    // rbx: JSObject (previous new top)
    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(rbx);
  }

  // Allocate the new receiver object using the runtime call.
  // rdi: function (constructor)
  __ bind(&rt_call);
  // Must restore rdi (constructor) before calling runtime.
  __ movq(rdi, Operand(rsp, 0));
  __ push(rdi);
  __ CallRuntime(Runtime::kNewObject, 1);
  __ movq(rbx, rax);  // store result in rbx

  // New object allocated.
  // rbx: newly allocated object
  __ bind(&allocated);
  // Retrieve the function from the stack.
  __ pop(rdi);

  // Retrieve smi-tagged arguments count from the stack.
  __ movq(rax, Operand(rsp, 0));
  __ SmiToInteger32(rax, rax);

  // Push the allocated receiver to the stack. We need two copies
  // because we may have to return the original one and the calling
  // conventions dictate that the called function pops the receiver.
  __ push(rbx);
  __ push(rbx);

  // Setup pointer to last argument.
  __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

  // Copy arguments and receiver to the expression stack.
  // The loop counts down from argc-ish index in rcx; the first decq makes
  // the loop copy arguments rax-1 .. 0.
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ push(Operand(rbx, rcx, times_pointer_size, 0));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Call the function.
  ParameterCount actual(rax);
  __ InvokeFunction(rdi, actual, CALL_FUNCTION);

  // Restore context from the frame.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

  // If the result is an object (in the ECMA sense), we should get rid
  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
  // on page 74.
  Label use_receiver, exit;
  // If the result is a smi, it is *not* an object in the ECMA sense.
  __ JumpIfSmi(rax, &use_receiver);

  // If the type of the result (stored in its map) is less than
  // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
  __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(above_equal, &exit);

  // Throw away the result of the constructor invocation and use the
  // on-stack receiver as the result.
  __ bind(&use_receiver);
  __ movq(rax, Operand(rsp, 0));

  // Restore the arguments count and leave the construct frame.
  __ bind(&exit);
  __ movq(rbx, Operand(rsp, kPointerSize));  // get arguments count
  __ LeaveConstructFrame();

  // Remove caller arguments from the stack and return.
  // The extra 1 * kPointerSize skips the implicit receiver as well.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
  __ IncrementCounter(&Counters::constructed_objects, 1);
  __ ret(0);
}
1145
1146
// Shared body of the C++-to-JS entry trampolines. Adapts the platform C++
// calling convention (Win64 vs. System V) into the JS calling convention,
// copies the argument handles onto the stack, and then either calls the
// JSConstructCall builtin (is_construct) or invokes the function directly.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Platform specific argument handling. After this, the stack contains
  // an internal frame and the pushed function and receiver, and
  // register rax and rbx holds the argument count and argument array,
  // while rdi holds the function pointer and rsi the context.
#ifdef _WIN64
  // MSVC parameters in:
  // rcx : entry (ignored)
  // rdx : function
  // r8 : receiver
  // r9 : argc
  // [rsp+0x20] : argv

  // Clear the context before we push it when entering the JS frame.
  __ xor_(rsi, rsi);
  __ EnterInternalFrame();

  // Load the function context into rsi.
  __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

  // Push the function and the receiver onto the stack.
  __ push(rdx);
  __ push(r8);

  // Load the number of arguments and setup pointer to the arguments.
  __ movq(rax, r9);
  // Load the previous frame pointer to access C argument on stack
  __ movq(kScratchRegister, Operand(rbp, 0));
  __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
  // Load the function pointer into rdi.
  __ movq(rdi, rdx);
#else  // !defined(_WIN64)
  // GCC parameters in:
  // rdi : entry (ignored)
  // rsi : function
  // rdx : receiver
  // rcx : argc
  // r8 : argv

  __ movq(rdi, rsi);
  // rdi : function

  // Clear the context before we push it when entering the JS frame.
  __ xor_(rsi, rsi);
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push the function and receiver and setup the context.
  __ push(rdi);
  __ push(rdx);
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Load the number of arguments and setup pointer to the arguments.
  __ movq(rax, rcx);
  __ movq(rbx, r8);
#endif  // _WIN64

  // Set up the roots register.
  ExternalReference roots_address = ExternalReference::roots_address();
  __ movq(r13, roots_address);

  // Current stack contents:
  // [rsp + 2 * kPointerSize ... ]: Internal frame
  // [rsp + kPointerSize]         : function
  // [rsp]                        : receiver
  // Current register contents:
  // rax : argc
  // rbx : argv
  // rsi : context
  // rdi : function

  // Copy arguments to the stack in a loop.
  // Register rbx points to array of pointers to handle locations.
  // Push the values of these handles.
  Label loop, entry;
  __ xor_(rcx, rcx);  // Set loop variable to 0.
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
  __ push(Operand(kScratchRegister, 0));  // dereference handle
  __ addq(rcx, Immediate(1));
  __ bind(&entry);
  __ cmpq(rcx, rax);
  __ j(not_equal, &loop);

  // Invoke the code.
  if (is_construct) {
    // Expects rdi to hold function pointer.
    __ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
            RelocInfo::CODE_TARGET);
  } else {
    ParameterCount actual(rax);
    // Function must be in rdi.
    __ InvokeFunction(rdi, actual, CALL_FUNCTION);
  }

  // Exit the JS frame. Notice that this also removes the empty
  // context and the function left on the stack by the code
  // invocation.
  __ LeaveInternalFrame();
  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // remove receiver
}
1259
1260
// Entry trampoline for invoking a JS function via a normal (non-construct)
// call from C++.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
1264
1265
// Entry trampoline for invoking a JS function as a constructor from C++.
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
1269
1270} } // namespace v8::internal