// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "codegen-inl.h"
#include "macro-assembler.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
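// The "__" shorthand below routes every emitted instruction through the
// MacroAssembler* passed to each generator, i.e. "__ push(rdi)" expands to
// "masm->push(rdi)".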
39
Steve Blocka7e24c12009-10-30 11:49:00 +000040
Leon Clarkee46be812010-01-19 14:06:41 +000041void Builtins::Generate_Adaptor(MacroAssembler* masm,
42 CFunctionId id,
43 BuiltinExtraArguments extra_args) {
44 // ----------- S t a t e -------------
45 // -- rax : number of arguments excluding receiver
46 // -- rdi : called function (only guaranteed when
47 // extra_args requires it)
48 // -- rsi : context
49 // -- rsp[0] : return address
50 // -- rsp[8] : last argument
51 // -- ...
52 // -- rsp[8 * argc] : first argument (argc == rax)
53 // -- rsp[8 * (argc +1)] : receiver
54 // -----------------------------------
55
56 // Insert extra arguments.
57 int num_extra_args = 0;
58 if (extra_args == NEEDS_CALLED_FUNCTION) {
59 num_extra_args = 1;
60 __ pop(kScratchRegister); // Save return address.
61 __ push(rdi);
62 __ push(kScratchRegister); // Restore return address.
63 } else {
64 ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
65 }
66
Steve Block6ded16b2010-05-10 14:33:55 +010067 // JumpToExternalReference expects rax to contain the number of arguments
Leon Clarkee46be812010-01-19 14:06:41 +000068 // including the receiver and the extra arguments.
69 __ addq(rax, Immediate(num_extra_args + 1));
Steve Block6ded16b2010-05-10 14:33:55 +010070 __ JumpToExternalReference(ExternalReference(id), 1);
Steve Blocka7e24c12009-10-30 11:49:00 +000071}
72
73
74static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
75 __ push(rbp);
76 __ movq(rbp, rsp);
77
78 // Store the arguments adaptor context sentinel.
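  // The sentinel occupies the slot where a JavaScript frame keeps its
  // context; the stack walker uses it to recognize arguments adaptor frames.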
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ push(rdi);

  // Preserve the number of arguments on the stack. Must preserve both
  // rax and rbx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(rcx, rax);
  __ push(rcx);
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movq(rsp, rbp);
  __ pop(rbp);

  // Remove caller arguments from the stack.
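  // Pop the return address, step over the arguments (rbx holds the smi
  // argument count) plus the receiver, and push the return address back.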
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdx : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  __ IncrementCounter(&Counters::arguments_adaptors, 1);

  Label enough, too_few;
  __ cmpq(rax, rbx);
  __ j(less, &too_few);
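  // An expected argument count of kDontAdaptArgumentsSentinel marks functions
  // that handle a variable number of arguments themselves, so no adaptor
  // frame is built for them.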
  __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ movq(rcx, Immediate(-1));  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(rcx);
    __ push(Operand(rax, 0));
    __ subq(rax, Immediate(kPointerSize));
    __ cmpq(rcx, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ movq(rcx, Immediate(-1));  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(rcx);
    __ push(Operand(rdi, 0));
    __ subq(rdi, Immediate(kPointerSize));
    __ cmpq(rcx, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incq(rcx);
    __ push(kScratchRegister);
    __ cmpq(rcx, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Stack Layout:
  //   rsp[0]:   Return address
  //   rsp[1]:   Argument n
  //   rsp[2]:   Argument n-1
  //    ...
  //   rsp[n]:   Argument 1
  //   rsp[n+1]: Receiver (function to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
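  //    With zero arguments there would be no slot to turn into the receiver
  //    when the arguments are shifted down in step 4, so push an undefined
  //    value below the return address and count it as the single argument.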
  { Label done;
    __ testq(rax, rax);
    __ j(not_zero, &done);
    __ pop(rbx);
    __ Push(Factory::undefined_value());
    __ push(rbx);
    __ incq(rax);
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label non_function;
  // The function to call is at position n+1 on the stack.
  __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
  __ JumpIfSmi(rdi, &non_function);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &non_function);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
    __ JumpIfSmi(rbx, &convert_to_object);

    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
    __ j(below, &convert_to_object);
    __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
    __ j(below_equal, &shift_arguments);

    __ bind(&convert_to_object);
    __ EnterInternalFrame();  // In order to preserve argument count.
    __ Integer32ToSmi(rax, rax);
    __ push(rax);

    __ push(rbx);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ movq(rbx, rax);

    __ pop(rax);
    __ SmiToInteger32(rax, rax);
    __ LeaveInternalFrame();
    // Restore the function to rdi.
    __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
    __ movq(rbx, FieldOperand(rbx, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx);

    __ jmp(&shift_arguments);
  }


  // 3b. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  __ bind(&non_function);
  __ movq(Operand(rsp, rax, times_pointer_size, 0), rdi);
  __ xor_(rdi, rdi);

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  __ bind(&shift_arguments);
  { Label loop;
    __ movq(rcx, rax);
    __ bind(&loop);
    __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
    __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
    __ decq(rcx);
    __ j(not_sign, &loop);  // While non-negative (to copy return address).
    __ pop(rbx);  // Discard copy of return address.
    __ decq(rax);  // One fewer argument (first argument is new receiver).
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin.
  { Label function;
    __ testq(rdi, rdi);
    __ j(not_zero, &function);
    __ xor_(rbx, rbx);
    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register rdx without checking arguments.
  __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movsxlq(rbx,
             FieldOperand(rdx,
                          SharedFunctionInfo::kFormalParameterCountOffset));
  __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ cmpq(rax, rbx);
  __ j(not_equal,
       Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
       RelocInfo::CODE_TARGET);

  ParameterCount expected(0);
  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION);
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  // Stack at entry:
  //    rsp: return address
  //  rsp+8: arguments
  // rsp+16: receiver ("this")
  // rsp+24: function
  __ EnterInternalFrame();
  // Stack frame:
  //    rbp: Old base pointer
  // rbp[1]: return address
  // rbp[2]: function arguments
  // rbp[3]: receiver
  // rbp[4]: function
  static const int kArgumentsOffset = 2 * kPointerSize;
  static const int kReceiverOffset = 3 * kPointerSize;
  static const int kFunctionOffset = 4 * kPointerSize;
  __ push(Operand(rbp, kFunctionOffset));
  __ push(Operand(rbp, kArgumentsOffset));
  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movq(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subq(rcx, kScratchRegister);
  // Make rdx the space we need for the array when it is unrolled onto the
  // stack.
  __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
  // Check if the arguments will overflow the stack.
  __ cmpq(rcx, rdx);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ push(Operand(rbp, kFunctionOffset));
  __ push(rax);
  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
  __ bind(&okay);
  // End of stack check.

  // Push current index and limit.
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
  const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
  __ push(rax);  // limit
  __ push(Immediate(0));  // index
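  // The limit (the smi argument count left in rax by APPLY_PREPARE) and the
  // zero index now sit at fixed offsets from rbp; the copy loop below reads
  // and updates them through kLimitOffset and kIndexOffset.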

  // Change context eagerly to get the right global object if
  // necessary.
  __ movq(rdi, Operand(rbp, kFunctionOffset));
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Compute the receiver.
  Label call_to_object, use_global_receiver, push_receiver;
  __ movq(rbx, Operand(rbp, kReceiverOffset));
  __ JumpIfSmi(rbx, &call_to_object);
  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
  __ j(equal, &use_global_receiver);
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &use_global_receiver);

  // If given receiver is already a JavaScript object then there's no
  // reason for converting it.
  __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(below, &call_to_object);
  __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
  __ j(below_equal, &push_receiver);

  // Convert the receiver to an object.
  __ bind(&call_to_object);
  __ push(rbx);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ movq(rbx, rax);
  __ jmp(&push_receiver);

  // Use the current global receiver object as the receiver.
  __ bind(&use_global_receiver);
  const int kGlobalOffset =
      Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
  __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

  // Push the receiver.
  __ bind(&push_receiver);
  __ push(rbx);

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ movq(rax, Operand(rbp, kIndexOffset));
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(rdx, Operand(rbp, kArgumentsOffset));  // load arguments

  // Use inline caching to speed up access to arguments.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  // It is important that we do not have a test instruction after the
  // call.  A test instruction after the call is used to indicate that
  // we have generated an inline version of the keyed load.  In this
  // case, we know that we are not generating a test instruction next.

  // Push the nth argument.
  __ push(rax);

  // Update the index on the stack and in register rax.
  __ movq(rax, Operand(rbp, kIndexOffset));
  __ SmiAddConstant(rax, rax, Smi::FromInt(1));
  __ movq(Operand(rbp, kIndexOffset), rax);

  __ bind(&entry);
  __ cmpq(rax, Operand(rbp, kLimitOffset));
  __ j(not_equal, &loop);

  // Invoke the function.
  ParameterCount actual(rax);
  __ SmiToInteger32(rax, rax);
  __ movq(rdi, Operand(rbp, kFunctionOffset));
  __ InvokeFunction(rdi, actual, CALL_FUNCTION);

  __ LeaveInternalFrame();
  __ ret(3 * kPointerSize);  // remove function, receiver, and arguments
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the global context.
  __ movq(result, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ movq(result, FieldOperand(result, GlobalObject::kGlobalContextOffset));
  // Load the Array function from the global context.
  __ movq(result,
          Operand(result, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


// Number of empty elements to allocate for an empty array.
static const int kPreallocatedArrayElements = 4;


// Allocate an empty JSArray. The allocated array is put into the result
// register. If the parameter initial_capacity is larger than zero an elements
// backing store is allocated with this size and filled with the hole values.
// Otherwise the elements backing store is set to the empty FixedArray.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 int initial_capacity,
                                 Label* gc_required) {
  ASSERT(initial_capacity >= 0);

  // Load the initial map from the array function.
  __ movq(scratch1, FieldOperand(array_function,
                                 JSFunction::kPrototypeOrInitialMapOffset));

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.
  int size = JSArray::kSize;
  if (initial_capacity > 0) {
    size += FixedArray::SizeFor(initial_capacity);
  }
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
  __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
          Factory::empty_fixed_array());
  // Field JSArray::kElementsOffset is initialized later.
  __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));

  // If no storage is requested for the elements array just set the empty
  // fixed array.
  if (initial_capacity == 0) {
    __ Move(FieldOperand(result, JSArray::kElementsOffset),
            Factory::empty_fixed_array());
    return;
  }

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // scratch2: start of next object
  __ lea(scratch1, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), scratch1);

  // Initialize the FixedArray and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array
  // scratch2: start of next object
  __ Move(FieldOperand(scratch1, HeapObject::kMapOffset),
          Factory::fixed_array_map());
  __ Move(FieldOperand(scratch1, FixedArray::kLengthOffset),
          Smi::FromInt(initial_capacity));

  // Fill the FixedArray with the hole value. Inline the code if short.
  // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
  static const int kLoopUnfoldLimit = 4;
  ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
  __ Move(scratch3, Factory::the_hole_value());
  if (initial_capacity <= kLoopUnfoldLimit) {
    // Use a scratch register here to have only one reloc info when unfolding
    // the loop.
    for (int i = 0; i < initial_capacity; i++) {
      __ movq(FieldOperand(scratch1,
                           FixedArray::kHeaderSize + i * kPointerSize),
              scratch3);
    }
  } else {
    Label loop, entry;
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(scratch1, 0), scratch3);
    __ addq(scratch1, Immediate(kPointerSize));
    __ bind(&entry);
    __ cmpq(scratch1, scratch2);
    __ j(below, &loop);
  }
}


// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register, and the beginning and end of the FixedArray
// elements storage are put into registers elements_array and
// elements_array_end (see below for when that is not the case). If the
// parameter fill_with_hole is true the allocated elements backing store is
// filled with the hole values, otherwise it is left uninitialized. When the
// backing store is filled the register elements_array is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi.
                            Register result,
                            Register elements_array,
                            Register elements_array_end,
                            Register scratch,
                            bool fill_with_hole,
                            Label* gc_required) {
  Label not_empty, allocated;

  // Load the initial map from the array function.
  __ movq(elements_array,
          FieldOperand(array_function,
                       JSFunction::kPrototypeOrInitialMapOffset));

  // Check whether an empty sized array is requested.
  __ testq(array_size, array_size);
  __ j(not_zero, &not_empty);

  // If an empty array is requested allocate a small elements array anyway. This
  // keeps the code below free of special casing for the empty array.
  int size = JSArray::kSize + FixedArray::SizeFor(kPreallocatedArrayElements);
  __ AllocateInNewSpace(size,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);
  __ jmp(&allocated);

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested elements.
  __ bind(&not_empty);
  SmiIndex index =
      masm->SmiToIndex(kScratchRegister, array_size, kPointerSizeLog2);
  __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
                        index.scale,
                        index.reg,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array: initial map
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ bind(&allocated);
  __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
  __ Move(elements_array, Factory::empty_fixed_array());
  __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
  // Field JSArray::kElementsOffset is initialized later.
  __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ lea(elements_array, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), elements_array);

  // Initialize the fixed array. FixedArray length is stored as a smi.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
          Factory::fixed_array_map());
  Label not_empty_2, fill_array;
  __ SmiTest(array_size);
  __ j(not_zero, &not_empty_2);
  // Length of the FixedArray is the number of pre-allocated elements even
  // though the actual JSArray has length 0.
  __ Move(FieldOperand(elements_array, FixedArray::kLengthOffset),
          Smi::FromInt(kPreallocatedArrayElements));
  __ jmp(&fill_array);
  __ bind(&not_empty_2);
  // For non-empty JSArrays the length of the FixedArray and the JSArray is the
  // same.
  __ movq(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  __ bind(&fill_array);
  if (fill_with_hole) {
    Label loop, entry;
    __ Move(scratch, Factory::the_hole_value());
    __ lea(elements_array, Operand(elements_array,
                                   FixedArray::kHeaderSize - kHeapObjectTag));
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(elements_array, 0), scratch);
    __ addq(elements_array, Immediate(kPointerSize));
    __ bind(&entry);
    __ cmpq(elements_array, elements_array_end);
    __ j(below, &loop);
  }
}


// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code the runtime is called. This
// function assumes the following state:
//   rdi: constructor (built-in Array function)
//   rax: argc
//   rsp[0]: return address
//   rsp[8]: last argument
// This function is used for both construct and normal calls of Array. The only
// difference between handling a construct call and a normal call is that for a
// construct call the constructor function in rdi needs to be preserved for
// entering the generic code. In both cases argc in rax needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// a construct call and a normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label* call_generic_code) {
  Label argc_one_or_more, argc_two_or_more;

  // Check for array construction with zero arguments.
  __ testq(rax, rax);
  __ j(not_zero, &argc_one_or_more);

  // Handle construction of an empty array.
  AllocateEmptyJSArray(masm,
                       rdi,
                       rbx,
                       rcx,
                       rdx,
                       r8,
                       kPreallocatedArrayElements,
                       call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);
  __ movq(rax, rbx);
  __ ret(kPointerSize);

  // Check for one argument. Bail out if argument is not smi or if it is
  // negative.
  __ bind(&argc_one_or_more);
  __ cmpq(rax, Immediate(1));
  __ j(not_equal, &argc_two_or_more);
  __ movq(rdx, Operand(rsp, kPointerSize));  // Get the argument from the stack.
  __ JumpUnlessNonNegativeSmi(rdx, call_generic_code);

  // Handle construction of an empty array of a certain size. Bail out if size
  // is too large to actually allocate an elements array.
  __ SmiCompare(rdx, Smi::FromInt(JSObject::kInitialMaxFastElementArray));
  __ j(greater_equal, call_generic_code);

  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0]: return address
  // rsp[8]: argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  true,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);
  __ movq(rax, rbx);
  __ ret(2 * kPointerSize);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ movq(rdx, rax);
  __ Integer32ToSmi(rdx, rdx);  // Convert argc to a smi.
  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0] : return address
  // rsp[8] : last argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  false,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);

  // rax: argc
  // rbx: JSArray
  // rcx: elements_array
  // r8: elements_array_end (untagged)
  // rsp[0]: return address
  // rsp[8]: last argument

  // Location of the last argument
  __ lea(r9, Operand(rsp, kPointerSize));

  // Location of the first array element (parameter fill_with_hole to
  // AllocateJSArray is false, so the FixedArray is returned in rcx).
  __ lea(rdx, Operand(rcx, FixedArray::kHeaderSize - kHeapObjectTag));

  // rax: argc
  // rbx: JSArray
  // rdx: location of the first array element
  // r9: location of the last argument
  // rsp[0]: return address
  // rsp[8]: last argument
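  // rcx counts down from argc-1 to 0 while rdx walks forward, so the first
  // JS argument (deepest on the stack) lands in element 0 and the last
  // argument in element argc-1.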
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(r9, rcx, times_pointer_size, 0));
  __ movq(Operand(rdx, 0), kScratchRegister);
  __ addq(rdx, Immediate(kPointerSize));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Remove caller arguments from the stack and return.
  // rax: argc
  // rbx: JSArray
  // rsp[0]: return address
  // rsp[8]: last argument
  __ pop(rcx);
  __ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
  __ push(rcx);
  __ movq(rax, rbx);
  __ ret(0);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code in case the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);
  Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
  Handle<Code> array_code(code);
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin Array function,
    // which always has a map.
    GenerateLoadArrayFunction(masm, rbx);
    __ cmpq(rdi, rbx);
    __ Check(equal, "Unexpected Array function");
    // Initial map for the builtin Array function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
  Handle<Code> generic_construct_stub(code);
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // TODO(849): implement custom construct stub.
  // Generate a copy of the generic stub for now.
  Generate_JSConstructStubGeneric(masm);
}


void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  // -----------------------------------

  Label non_function_call;
  // Check that function is not a smi.
  __ JumpIfSmi(rdi, &non_function_call);
  // Check that function is a JSFunction.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &non_function_call);

  // Jump to the function-specific construct stub.
  __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset));
  __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize));
  __ jmp(rbx);

  // rdi: called object
  // rax: number of arguments
  __ bind(&non_function_call);
  // Set expected number of arguments to zero (not changing rax).
  __ movq(rbx, Immediate(0));
  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
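  // Tail-call through the arguments adaptor: it builds an adaptor frame,
  // adapts the rax actual arguments to the zero expected arguments in rbx,
  // and calls the builtin entry loaded into rdx above.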
  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
          RelocInfo::CODE_TARGET);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  // Enter a construct frame.
  __ EnterConstructFrame();

  // Store a smi-tagged arguments count on the stack.
  __ Integer32ToSmi(rax, rax);
  __ push(rax);

  // Push the function to invoke on the stack.
  __ push(rdi);

  // Try to allocate the object without transitioning into C code. If any of the
  // preconditions is not met, the code bails out to the runtime call.
  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;

#ifdef ENABLE_DEBUGGER_SUPPORT
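    // When the debugger is stepping into this constructor call,
    // debug_step_in_fp is non-zero; skip the inlined allocation and go
    // through the runtime so the debugger can intercept the call.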
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address();
    __ movq(kScratchRegister, debug_step_in_fp);
    __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
    __ j(not_equal, &rt_call);
#endif

    // Verified that the constructor is a JSFunction.
    // Load the initial map and verify that it is in fact a map.
    // rdi: constructor
    __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi
    ASSERT(kSmiTag == 0);
    __ JumpIfSmi(rax, &rt_call);
    // rdi: constructor
    // rax: initial map (if proven valid below)
    __ CmpObjectType(rax, MAP_TYPE, rbx);
    __ j(not_equal, &rt_call);

    // Check that the constructor is not constructing a JSFunction (see comments
    // in Runtime_NewObject in runtime.cc); in that case the initial map's
    // instance type would be JS_FUNCTION_TYPE.
    // rdi: constructor
    // rax: initial map
    __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
    __ j(equal, &rt_call);

    if (count_constructions) {
      Label allocate;
      // Decrease generous allocation count.
      __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      __ decb(FieldOperand(rcx, SharedFunctionInfo::kConstructionCountOffset));
      __ j(not_zero, &allocate);

      __ push(rax);
      __ push(rdi);

      __ push(rdi);  // constructor
      // The call will replace the stub, so the countdown is only done once.
      __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

      __ pop(rdi);
      __ pop(rax);

      __ bind(&allocate);
    }

    // Now allocate the JSObject on the heap.
    __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
    __ shl(rdi, Immediate(kPointerSizeLog2));
    // rdi: size of new object
    __ AllocateInNewSpace(rdi,
                          rbx,
                          rdi,
                          no_reg,
                          &rt_call,
                          NO_ALLOCATION_FLAGS);
    // Allocated the JSObject, now initialize the fields.
    // rax: initial map
    // rbx: JSObject (not HeapObject tagged - the actual address).
    // rdi: start of next object
    __ movq(Operand(rbx, JSObject::kMapOffset), rax);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
    __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
    // Set extra fields in the newly allocated object.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    { Label loop, entry;
      // To allow for truncation.
      if (count_constructions) {
        __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
      } else {
        __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      }
      __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ jmp(&entry);
      __ bind(&loop);
      __ movq(Operand(rcx, 0), rdx);
      __ addq(rcx, Immediate(kPointerSize));
      __ bind(&entry);
      __ cmpq(rcx, rdi);
      __ j(less, &loop);
    }

    // Add the object tag to make the JSObject real, so that we can continue and
    // jump into the continuation code at any time from now on. Any failures
    // need to undo the allocation, so that the heap is in a consistent state
    // and verifiable.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    __ or_(rbx, Immediate(kHeapObjectTag));

    // Check if a non-empty properties array is needed.
    // Allocate and initialize a FixedArray if it is.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    // Calculate total properties described map.
    __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
    __ movzxbq(rcx, FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
    __ addq(rdx, rcx);
    // Calculate unused properties past the end of the in-object properties.
    __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
    __ subq(rdx, rcx);
    // Done if no extra properties are to be allocated.
    __ j(zero, &allocated);
    __ Assert(positive, "Property allocation count failed.");

    // Scale the number of elements by pointer size and add the header for
    // FixedArrays to the start of the next object calculation from above.
    // rbx: JSObject
    // rdi: start of next object (will be start of FixedArray)
    // rdx: number of elements in properties array
    __ AllocateInNewSpace(FixedArray::kHeaderSize,
                          times_pointer_size,
                          rdx,
                          rdi,
                          rax,
                          no_reg,
                          &undo_allocation,
                          RESULT_CONTAINS_TOP);

    // Initialize the FixedArray.
    // rbx: JSObject
    // rdi: FixedArray
    // rdx: number of elements
    // rax: start of next object
    __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
    __ movq(Operand(rdi, HeapObject::kMapOffset), rcx);  // setup the map
    __ Integer32ToSmi(rdx, rdx);
    __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx);  // and length

    // Initialize the fields to undefined.
    // rbx: JSObject
    // rdi: FixedArray
    // rax: start of next object
    // rdx: number of elements
    { Label loop, entry;
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
      __ jmp(&entry);
      __ bind(&loop);
      __ movq(Operand(rcx, 0), rdx);
      __ addq(rcx, Immediate(kPointerSize));
      __ bind(&entry);
      __ cmpq(rcx, rax);
      __ j(below, &loop);
    }

    // Store the initialized FixedArray into the properties field of
    // the JSObject
    // rbx: JSObject
    // rdi: FixedArray
    __ or_(rdi, Immediate(kHeapObjectTag));  // add the heap tag
    __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);


    // Continue with JSObject being successfully allocated
    // rbx: JSObject
    __ jmp(&allocated);

    // Undo the setting of the new top so that the heap is verifiable. For
    // example, the map's unused properties potentially do not match the
    // allocated object's unused properties.
    // rbx: JSObject (previous new top)
    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(rbx);
  }

  // Allocate the new receiver object using the runtime call.
  // rdi: function (constructor)
  __ bind(&rt_call);
  // Must restore rdi (constructor) before calling runtime.
  __ movq(rdi, Operand(rsp, 0));
  __ push(rdi);
  __ CallRuntime(Runtime::kNewObject, 1);
  __ movq(rbx, rax);  // store result in rbx

  // New object allocated.
  // rbx: newly allocated object
  __ bind(&allocated);
  // Retrieve the function from the stack.
  __ pop(rdi);

  // Retrieve smi-tagged arguments count from the stack.
  __ movq(rax, Operand(rsp, 0));
  __ SmiToInteger32(rax, rax);

  // Push the allocated receiver to the stack. We need two copies
  // because we may have to return the original one and the calling
  // conventions dictate that the called function pops the receiver.
  __ push(rbx);
  __ push(rbx);

  // Setup pointer to last argument.
  __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

  // Copy arguments and receiver to the expression stack.
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ push(Operand(rbx, rcx, times_pointer_size, 0));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Call the function.
  if (is_api_function) {
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
    Handle<Code> code = Handle<Code>(
        Builtins::builtin(Builtins::HandleApiCallConstruct));
    ParameterCount expected(0);
    __ InvokeCode(code, expected, expected,
                  RelocInfo::CODE_TARGET, CALL_FUNCTION);
  } else {
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION);
  }

  // Restore context from the frame.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

  // If the result is an object (in the ECMA sense), we should get rid
  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
  // on page 74.
  Label use_receiver, exit;
  // If the result is a smi, it is *not* an object in the ECMA sense.
  __ JumpIfSmi(rax, &use_receiver);

  // If the type of the result (stored in its map) is less than
  // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
  __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(above_equal, &exit);

  // Throw away the result of the constructor invocation and use the
  // on-stack receiver as the result.
  __ bind(&use_receiver);
  __ movq(rax, Operand(rsp, 0));

  // Restore the arguments count and leave the construct frame.
  __ bind(&exit);
  __ movq(rbx, Operand(rsp, kPointerSize));  // get arguments count
  __ LeaveConstructFrame();

  // Remove caller arguments from the stack and return.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
  __ IncrementCounter(&Counters::constructed_objects, 1);
  __ ret(0);
}


void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Platform specific argument handling. After this, the stack contains
  // an internal frame and the pushed function and receiver, and
  // registers rax and rbx hold the argument count and argument array,
  // while rdi holds the function pointer and rsi the context.
#ifdef _WIN64
  // MSVC parameters in:
  // rcx : entry (ignored)
  // rdx : function
  // r8 : receiver
  // r9 : argc
  // [rsp+0x20] : argv

  // Clear the context before we push it when entering the JS frame.
  __ xor_(rsi, rsi);
  __ EnterInternalFrame();

  // Load the function context into rsi.
  __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

  // Push the function and the receiver onto the stack.
  __ push(rdx);
  __ push(r8);

  // Load the number of arguments and setup pointer to the arguments.
  __ movq(rax, r9);
  // Load the previous frame pointer to access C argument on stack
  __ movq(kScratchRegister, Operand(rbp, 0));
  __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
  // Load the function pointer into rdi.
  __ movq(rdi, rdx);
#else  // _WIN64
  // GCC parameters in:
  // rdi : entry (ignored)
  // rsi : function
  // rdx : receiver
  // rcx : argc
  // r8 : argv

  __ movq(rdi, rsi);
  // rdi : function

  // Clear the context before we push it when entering the JS frame.
  __ xor_(rsi, rsi);
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push the function and receiver and setup the context.
  __ push(rdi);
  __ push(rdx);
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Load the number of arguments and setup pointer to the arguments.
  __ movq(rax, rcx);
  __ movq(rbx, r8);
#endif  // _WIN64

  // Current stack contents:
  // [rsp + 2 * kPointerSize ... ]: Internal frame
  // [rsp + kPointerSize]         : function
  // [rsp]                        : receiver
  // Current register contents:
  // rax : argc
  // rbx : argv
  // rsi : context
  // rdi : function

  // Copy arguments to the stack in a loop.
  // Register rbx points to array of pointers to handle locations.
  // Push the values of these handles.
  Label loop, entry;
  __ xor_(rcx, rcx);  // Set loop variable to 0.
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
  __ push(Operand(kScratchRegister, 0));  // dereference handle
  __ addq(rcx, Immediate(1));
  __ bind(&entry);
  __ cmpq(rcx, rax);
  __ j(not_equal, &loop);

  // Invoke the code.
  if (is_construct) {
    // Expects rdi to hold function pointer.
    __ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
            RelocInfo::CODE_TARGET);
  } else {
    ParameterCount actual(rax);
    // Function must be in rdi.
    __ InvokeFunction(rdi, actual, CALL_FUNCTION);
  }

  // Exit the JS frame. Notice that this also removes the empty
  // context and the function left on the stack by the code
  // invocation.
  __ LeaveInternalFrame();
  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // remove receiver
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push a copy of the function onto the stack.
  __ push(rdi);

  __ push(rdi);  // Function is also the parameter to the runtime call.
  __ CallRuntime(Runtime::kLazyCompile, 1);
  __ pop(rdi);

  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
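  // Runtime::kLazyCompile leaves the freshly compiled Code object in rax;
  // jump to its first instruction, just past the Code object header.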
  __ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rcx);
}

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64