// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "codegen-inl.h"
#include "deoptimizer.h"
#include "full-codegen.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : called function (only guaranteed when
  //                           extra_args requires it)
  //  -- rsi                 : context
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument (argc == rax)
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ pop(kScratchRegister);  // Save return address.
    __ push(rdi);
    __ push(kScratchRegister);  // Restore return address.
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addq(rax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id), 1);
}


void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  // -----------------------------------

  Label non_function_call;
  // Check that function is not a smi.
  __ JumpIfSmi(rdi, &non_function_call);
  // Check that function is a JSFunction.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &non_function_call);

  // Jump to the function-specific construct stub.
  __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset));
  __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize));
  __ jmp(rbx);

  // rdi: called object
  // rax: number of arguments
  __ bind(&non_function_call);
  // Set expected number of arguments to zero (not changing rax).
  __ movq(rbx, Immediate(0));
  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
          RelocInfo::CODE_TARGET);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  // Enter a construct frame.
  __ EnterConstructFrame();

  // Store a smi-tagged arguments count on the stack.
  __ Integer32ToSmi(rax, rax);
  __ push(rax);

  // Push the function to invoke on the stack.
  __ push(rdi);

  // Try to allocate the object without transitioning into C code. If any of
  // the preconditions is not met, the code bails out to the runtime call.
  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;

#ifdef ENABLE_DEBUGGER_SUPPORT
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address();
    __ movq(kScratchRegister, debug_step_in_fp);
    __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
    __ j(not_equal, &rt_call);
#endif

    // Verified that the constructor is a JSFunction.
    // Load the initial map and verify that it is in fact a map.
    // rdi: constructor
    __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    __ JumpIfSmi(rax, &rt_call);
    // rdi: constructor
    // rax: initial map (if proven valid below)
    __ CmpObjectType(rax, MAP_TYPE, rbx);
    __ j(not_equal, &rt_call);

    // Check that the constructor is not constructing a JSFunction (see
    // comments in Runtime_NewObject in runtime.cc); in that case the initial
    // map's instance type would be JS_FUNCTION_TYPE.
    // rdi: constructor
    // rax: initial map
    __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
    __ j(equal, &rt_call);

    if (count_constructions) {
      Label allocate;
      // Decrease generous allocation count.
      __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      __ decb(FieldOperand(rcx, SharedFunctionInfo::kConstructionCountOffset));
      __ j(not_zero, &allocate);

      __ push(rax);
      __ push(rdi);

      __ push(rdi);  // constructor
      // The call will replace the stub, so the countdown is only done once.
      __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

      __ pop(rdi);
      __ pop(rax);

      __ bind(&allocate);
    }

    // Now allocate the JSObject on the heap.
    __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
    __ shl(rdi, Immediate(kPointerSizeLog2));
    // rdi: size of new object
    __ AllocateInNewSpace(rdi,
                          rbx,
                          rdi,
                          no_reg,
                          &rt_call,
                          NO_ALLOCATION_FLAGS);
    // Allocated the JSObject, now initialize the fields.
    // rax: initial map
    // rbx: JSObject (not HeapObject tagged - the actual address).
    // rdi: start of next object
    __ movq(Operand(rbx, JSObject::kMapOffset), rax);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
    __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
    // Set extra fields in the newly allocated object.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    { Label loop, entry;
      // To allow for truncation.
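      // When counting constructions, fill the slack fields with a one-word
      // filler map so the unused tail of the object can later be trimmed;
      // otherwise fill every field with undefined.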
      if (count_constructions) {
        __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
      } else {
        __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      }
      __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ jmp(&entry);
      __ bind(&loop);
      __ movq(Operand(rcx, 0), rdx);
      __ addq(rcx, Immediate(kPointerSize));
      __ bind(&entry);
      __ cmpq(rcx, rdi);
      __ j(less, &loop);
    }

    // Add the object tag to make the JSObject real, so that we can continue
    // and jump into the continuation code at any time from now on. Any
    // failures need to undo the allocation, so that the heap is in a
    // consistent state and verifiable.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    __ or_(rbx, Immediate(kHeapObjectTag));

    // Check if a non-empty properties array is needed.
    // Allocate and initialize a FixedArray if it is.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    // Calculate the total number of properties described by the map.
    __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
    __ movzxbq(rcx, FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
    __ addq(rdx, rcx);
    // Calculate unused properties past the end of the in-object properties.
    __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
    __ subq(rdx, rcx);
    // Done if no extra properties are to be allocated.
    __ j(zero, &allocated);
    __ Assert(positive, "Property allocation count failed.");

    // Scale the number of elements by pointer size and add the header for
    // FixedArrays to the start of the next object calculation from above.
    // rbx: JSObject
    // rdi: start of next object (will be start of FixedArray)
    // rdx: number of elements in properties array
    __ AllocateInNewSpace(FixedArray::kHeaderSize,
                          times_pointer_size,
                          rdx,
                          rdi,
                          rax,
                          no_reg,
                          &undo_allocation,
                          RESULT_CONTAINS_TOP);

    // Initialize the FixedArray.
    // rbx: JSObject
    // rdi: FixedArray
    // rdx: number of elements
    // rax: start of next object
    __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
    __ movq(Operand(rdi, HeapObject::kMapOffset), rcx);  // set up the map
    __ Integer32ToSmi(rdx, rdx);
    __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx);  // and length

    // Initialize the fields to undefined.
    // rbx: JSObject
    // rdi: FixedArray
    // rax: start of next object
    // rdx: number of elements
    { Label loop, entry;
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
      __ jmp(&entry);
      __ bind(&loop);
      __ movq(Operand(rcx, 0), rdx);
      __ addq(rcx, Immediate(kPointerSize));
      __ bind(&entry);
      __ cmpq(rcx, rax);
      __ j(below, &loop);
    }

    // Store the initialized FixedArray into the properties field of
    // the JSObject.
    // rbx: JSObject
    // rdi: FixedArray
    __ or_(rdi, Immediate(kHeapObjectTag));  // add the heap tag
    __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);


    // Continue with JSObject being successfully allocated.
    // rbx: JSObject
    __ jmp(&allocated);

    // Undo the setting of the new top so that the heap is verifiable. For
    // example, the map's unused properties potentially do not match the
    // allocated object's unused properties.
    // rbx: JSObject (previous new top)
    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(rbx);
  }

  // Allocate the new receiver object using the runtime call.
  // rdi: function (constructor)
  __ bind(&rt_call);
  // Must restore rdi (constructor) before calling runtime.
  __ movq(rdi, Operand(rsp, 0));
  __ push(rdi);
  __ CallRuntime(Runtime::kNewObject, 1);
  __ movq(rbx, rax);  // store result in rbx

  // New object allocated.
  // rbx: newly allocated object
  __ bind(&allocated);
  // Retrieve the function from the stack.
  __ pop(rdi);

  // Retrieve smi-tagged arguments count from the stack.
  __ movq(rax, Operand(rsp, 0));
  __ SmiToInteger32(rax, rax);

  // Push the allocated receiver to the stack. We need two copies
  // because we may have to return the original one and the calling
  // conventions dictate that the called function pops the receiver.
  __ push(rbx);
  __ push(rbx);

  // Set up the pointer to the last argument.
  __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

  // Copy arguments and receiver to the expression stack.
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ push(Operand(rbx, rcx, times_pointer_size, 0));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Call the function.
  if (is_api_function) {
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
    Handle<Code> code = Handle<Code>(
        Builtins::builtin(Builtins::HandleApiCallConstruct));
    ParameterCount expected(0);
    __ InvokeCode(code, expected, expected,
                  RelocInfo::CODE_TARGET, CALL_FUNCTION);
  } else {
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION);
  }

  // Restore context from the frame.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

  // If the result is an object (in the ECMA sense), we should get rid
  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
  // on page 74.
  Label use_receiver, exit;
  // If the result is a smi, it is *not* an object in the ECMA sense.
  __ JumpIfSmi(rax, &use_receiver);

  // If the type of the result (stored in its map) is less than
  // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
  __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(above_equal, &exit);

  // Throw away the result of the constructor invocation and use the
  // on-stack receiver as the result.
  __ bind(&use_receiver);
  __ movq(rax, Operand(rsp, 0));

  // Restore the arguments count and leave the construct frame.
  __ bind(&exit);
  __ movq(rbx, Operand(rsp, kPointerSize));  // get arguments count
  __ LeaveConstructFrame();

  // Remove caller arguments from the stack and return.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
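  // index now holds the argument count scaled for pointer-size elements; the
  // lea below drops all arguments plus the receiver (the extra
  // 1 * kPointerSize) in a single adjustment of rsp.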
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
  __ IncrementCounter(&Counters::constructed_objects, 1);
  __ ret(0);
}


void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Platform specific argument handling. After this, the stack contains
  // an internal frame and the pushed function and receiver, and
  // registers rax and rbx hold the argument count and argument array,
  // while rdi holds the function pointer and rsi the context.
#ifdef _WIN64
  // MSVC parameters in:
  // rcx        : entry (ignored)
  // rdx        : function
  // r8         : receiver
  // r9         : argc
  // [rsp+0x20] : argv

  // Clear the context before we push it when entering the JS frame.
  __ Set(rsi, 0);
  __ EnterInternalFrame();

  // Load the function context into rsi.
  __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

  // Push the function and the receiver onto the stack.
  __ push(rdx);
  __ push(r8);

  // Load the number of arguments and set up the pointer to the arguments.
  __ movq(rax, r9);
  // Load the previous frame pointer to access the C arguments on the stack.
  __ movq(kScratchRegister, Operand(rbp, 0));
  __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
  // Load the function pointer into rdi.
  __ movq(rdi, rdx);
#else  // _WIN64
  // GCC parameters in:
  // rdi : entry (ignored)
  // rsi : function
  // rdx : receiver
  // rcx : argc
  // r8  : argv

  __ movq(rdi, rsi);
  // rdi : function

  // Clear the context before we push it when entering the JS frame.
  __ Set(rsi, 0);
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push the function and receiver and set up the context.
  __ push(rdi);
  __ push(rdx);
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Load the number of arguments and set up the pointer to the arguments.
  __ movq(rax, rcx);
  __ movq(rbx, r8);
#endif  // _WIN64

  // Current stack contents:
  // [rsp + 2 * kPointerSize ... ] : Internal frame
  // [rsp + kPointerSize]          : function
  // [rsp]                         : receiver
  // Current register contents:
  // rax : argc
  // rbx : argv
  // rsi : context
  // rdi : function

  // Copy arguments to the stack in a loop.
  // Register rbx points to array of pointers to handle locations.
  // Push the values of these handles.
  Label loop, entry;
  __ Set(rcx, 0);  // Set loop variable to 0.
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
  __ push(Operand(kScratchRegister, 0));  // dereference handle
  __ addq(rcx, Immediate(1));
  __ bind(&entry);
  __ cmpq(rcx, rax);
  __ j(not_equal, &loop);

  // Invoke the code.
  if (is_construct) {
    // Expects rdi to hold function pointer.
    __ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
            RelocInfo::CODE_TARGET);
  } else {
    ParameterCount actual(rax);
    // Function must be in rdi.
    __ InvokeFunction(rdi, actual, CALL_FUNCTION);
  }

  // Exit the JS frame. Notice that this also removes the empty
  // context and the function left on the stack by the code
  // invocation.
  __ LeaveInternalFrame();
  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // remove receiver
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push a copy of the function onto the stack.
  __ push(rdi);

  __ push(rdi);  // Function is also the parameter to the runtime call.
  __ CallRuntime(Runtime::kLazyCompile, 1);
  __ pop(rdi);

  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
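  // The runtime call leaves the freshly compiled Code object in rax; skip its
  // header to reach the first instruction.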
  __ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rcx);
}


void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push a copy of the function onto the stack.
  __ push(rdi);

  __ push(rdi);  // Function is also the parameter to the runtime call.
  __ CallRuntime(Runtime::kLazyRecompile, 1);

  // Restore function and tear down temporary frame.
  __ pop(rdi);
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
  __ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rcx);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Pass the deoptimization type to the runtime system.
  __ Push(Smi::FromInt(static_cast<int>(type)));

  __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));

  // Switch on the state.
  NearLabel not_no_registers, not_tos_rax;
  __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS));
  __ j(not_equal, &not_no_registers);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
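  // Reload the saved top-of-stack value into rax, then drop both the state
  // and the saved value from the stack.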
  __ movq(rax, Operand(rsp, 2 * kPointerSize));
  __ cmpq(rcx, Immediate(FullCodeGenerator::TOS_REG));
  __ j(not_equal, &not_tos_rax);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  __ Abort("no cases left");
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  __ int3();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Stack Layout:
  //   rsp[0]   : Return address
  //   rsp[1]   : Argument n
  //   rsp[2]   : Argument n-1
  //   ...
  //   rsp[n]   : Argument 1
  //   rsp[n+1] : Receiver (function to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  { Label done;
    __ testq(rax, rax);
    __ j(not_zero, &done);
    __ pop(rbx);
    __ Push(Factory::undefined_value());
    __ push(rbx);
    __ incq(rax);
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label non_function;
  // The function to call is at position n+1 on the stack.
  __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
  __ JumpIfSmi(rdi, &non_function);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &non_function);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
    __ JumpIfSmi(rbx, &convert_to_object);

    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
    __ j(below, &convert_to_object);
    __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
    __ j(below_equal, &shift_arguments);

    __ bind(&convert_to_object);
    __ EnterInternalFrame();  // In order to preserve argument count.
    __ Integer32ToSmi(rax, rax);
    __ push(rax);

    __ push(rbx);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ movq(rbx, rax);

    __ pop(rax);
    __ SmiToInteger32(rax, rax);
    __ LeaveInternalFrame();
    // Restore the function to rdi.
    __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
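    // Load the function context's global object, switch to its global
    // context, and use that global context's global receiver object as the
    // implicit receiver.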
    __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
    __ movq(rbx, FieldOperand(rbx, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx);

    __ jmp(&shift_arguments);
  }


  // 3b. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  __ bind(&non_function);
  __ movq(Operand(rsp, rax, times_pointer_size, 0), rdi);
  __ Set(rdi, 0);

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  __ bind(&shift_arguments);
  { Label loop;
    __ movq(rcx, rax);
    __ bind(&loop);
    __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
    __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
    __ decq(rcx);
    __ j(not_sign, &loop);  // While non-negative (to copy return address).
    __ pop(rbx);  // Discard copy of return address.
    __ decq(rax);  // One fewer argument (first argument is new receiver).
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin.
  { Label function;
    __ testq(rdi, rdi);
    __ j(not_zero, &function);
    __ Set(rbx, 0);
    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register rdx without checking arguments.
  __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movsxlq(rbx,
             FieldOperand(rdx,
                          SharedFunctionInfo::kFormalParameterCountOffset));
  __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ cmpq(rax, rbx);
  __ j(not_equal,
       Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
       RelocInfo::CODE_TARGET);

  ParameterCount expected(0);
  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION);
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  // Stack at entry:
  //     rsp: return address
  //   rsp+8: arguments
  //  rsp+16: receiver ("this")
  //  rsp+24: function
  __ EnterInternalFrame();
  // Stack frame:
  //     rbp: Old base pointer
  //  rbp[1]: return address
  //  rbp[2]: function arguments
  //  rbp[3]: receiver
  //  rbp[4]: function
  static const int kArgumentsOffset = 2 * kPointerSize;
  static const int kReceiverOffset = 3 * kPointerSize;
  static const int kFunctionOffset = 4 * kPointerSize;
  __ push(Operand(rbp, kFunctionOffset));
  __ push(Operand(rbp, kArgumentsOffset));
  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movq(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subq(rcx, kScratchRegister);
  // Make rdx the space we need for the array when it is unrolled onto the
  // stack.
  __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
  // Check if the arguments will overflow the stack.
  __ cmpq(rcx, rdx);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ push(Operand(rbp, kFunctionOffset));
  __ push(rax);
  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
  __ bind(&okay);
  // End of stack check.

  // Push current index and limit.
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
  const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
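  // The limit and index live in the two expression-stack slots pushed below;
  // the constants above give their fixed offsets from rbp within this frame.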
  __ push(rax);  // limit
  __ push(Immediate(0));  // index

  // Change context eagerly to get the right global object if
  // necessary.
  __ movq(rdi, Operand(rbp, kFunctionOffset));
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Compute the receiver.
  Label call_to_object, use_global_receiver, push_receiver;
  __ movq(rbx, Operand(rbp, kReceiverOffset));
  __ JumpIfSmi(rbx, &call_to_object);
  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
  __ j(equal, &use_global_receiver);
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &use_global_receiver);

  // If given receiver is already a JavaScript object then there's no
  // reason for converting it.
  __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(below, &call_to_object);
  __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
  __ j(below_equal, &push_receiver);

  // Convert the receiver to an object.
  __ bind(&call_to_object);
  __ push(rbx);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ movq(rbx, rax);
  __ jmp(&push_receiver);

  // Use the current global receiver object as the receiver.
  __ bind(&use_global_receiver);
  const int kGlobalOffset =
      Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
  __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

  // Push the receiver.
  __ bind(&push_receiver);
  __ push(rbx);

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ movq(rax, Operand(rbp, kIndexOffset));
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(rdx, Operand(rbp, kArgumentsOffset));  // load arguments

  // Use inline caching to speed up access to arguments.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  // It is important that we do not have a test instruction after the
  // call. A test instruction after the call is used to indicate that
  // we have generated an inline version of the keyed load. In this
  // case, we know that we are not generating a test instruction next.

  // Push the nth argument.
  __ push(rax);

  // Update the index on the stack and in register rax.
  __ movq(rax, Operand(rbp, kIndexOffset));
  __ SmiAddConstant(rax, rax, Smi::FromInt(1));
  __ movq(Operand(rbp, kIndexOffset), rax);

  __ bind(&entry);
  __ cmpq(rax, Operand(rbp, kLimitOffset));
  __ j(not_equal, &loop);

  // Invoke the function.
  ParameterCount actual(rax);
  __ SmiToInteger32(rax, rax);
  __ movq(rdi, Operand(rbp, kFunctionOffset));
  __ InvokeFunction(rdi, actual, CALL_FUNCTION);

  __ LeaveInternalFrame();
  __ ret(3 * kPointerSize);  // remove function, receiver, and arguments
}


// Number of empty elements to allocate for an empty array.
static const int kPreallocatedArrayElements = 4;


// Allocate an empty JSArray. The allocated array is put into the result
// register. If the parameter initial_capacity is larger than zero an elements
// backing store is allocated with this size and filled with the hole values.
// Otherwise the elements backing store is set to the empty FixedArray.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 int initial_capacity,
                                 Label* gc_required) {
  ASSERT(initial_capacity >= 0);

  // Load the initial map from the array function.
  __ movq(scratch1, FieldOperand(array_function,
                                 JSFunction::kPrototypeOrInitialMapOffset));

  // Allocate the JSArray object together with space for a fixed array with
  // the requested elements.
  int size = JSArray::kSize;
  if (initial_capacity > 0) {
    size += FixedArray::SizeFor(initial_capacity);
  }
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
  __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
          Factory::empty_fixed_array());
  // Field JSArray::kElementsOffset is initialized later.
  __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));

  // If no storage is requested for the elements array just set the empty
  // fixed array.
  if (initial_capacity == 0) {
    __ Move(FieldOperand(result, JSArray::kElementsOffset),
            Factory::empty_fixed_array());
    return;
  }

  // Calculate the location of the elements array and set elements array
  // member of the JSArray.
  // result: JSObject
  // scratch2: start of next object
  __ lea(scratch1, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), scratch1);

  // Initialize the FixedArray and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array
  // scratch2: start of next object
  __ Move(FieldOperand(scratch1, HeapObject::kMapOffset),
          Factory::fixed_array_map());
  __ Move(FieldOperand(scratch1, FixedArray::kLengthOffset),
          Smi::FromInt(initial_capacity));

  // Fill the FixedArray with the hole value. Inline the code if short.
  // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
  static const int kLoopUnfoldLimit = 4;
  ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
  __ Move(scratch3, Factory::the_hole_value());
  if (initial_capacity <= kLoopUnfoldLimit) {
    // Use a scratch register here to have only one reloc info when unfolding
    // the loop.
    for (int i = 0; i < initial_capacity; i++) {
      __ movq(FieldOperand(scratch1,
                           FixedArray::kHeaderSize + i * kPointerSize),
              scratch3);
    }
  } else {
    Label loop, entry;
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(scratch1, 0), scratch3);
    __ addq(scratch1, Immediate(kPointerSize));
    __ bind(&entry);
    __ cmpq(scratch1, scratch2);
    __ j(below, &loop);
  }
}


// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register and the beginning and end of the FixedArray
// elements storage are put into registers elements_array and
// elements_array_end (see below for when that is not the case). If the
// parameter fill_with_hole is true the allocated elements backing store is
// filled with the hole values, otherwise it is left uninitialized. When the
// backing store is filled the register elements_array is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi.
                            Register result,
                            Register elements_array,
                            Register elements_array_end,
                            Register scratch,
                            bool fill_with_hole,
                            Label* gc_required) {
  Label not_empty, allocated;

  // Load the initial map from the array function.
  __ movq(elements_array,
          FieldOperand(array_function,
                       JSFunction::kPrototypeOrInitialMapOffset));

  // Check whether an empty sized array is requested.
  __ testq(array_size, array_size);
  __ j(not_zero, &not_empty);

  // If an empty array is requested allocate a small elements array anyway.
  // This keeps the code below free of special casing for the empty array.
  int size = JSArray::kSize + FixedArray::SizeFor(kPreallocatedArrayElements);
  __ AllocateInNewSpace(size,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);
  __ jmp(&allocated);

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested elements.
  __ bind(&not_empty);
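  // array_size is a smi; convert it into an (index register, scale factor)
  // pair so the allocation below can add array_size * kPointerSize to the
  // fixed header sizes without untagging the value first.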
  SmiIndex index =
      masm->SmiToIndex(kScratchRegister, array_size, kPointerSizeLog2);
  __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
                        index.scale,
                        index.reg,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array: initial map
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ bind(&allocated);
  __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
  __ Move(elements_array, Factory::empty_fixed_array());
  __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
  // Field JSArray::kElementsOffset is initialized later.
  __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);

  // Calculate the location of the elements array and set elements array
  // member of the JSArray.
  // result: JSObject
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ lea(elements_array, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), elements_array);

  // Initialize the fixed array. FixedArray length is stored as a smi.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
          Factory::fixed_array_map());
  Label not_empty_2, fill_array;
  __ SmiTest(array_size);
  __ j(not_zero, &not_empty_2);
  // Length of the FixedArray is the number of pre-allocated elements even
  // though the actual JSArray has length 0.
  __ Move(FieldOperand(elements_array, FixedArray::kLengthOffset),
          Smi::FromInt(kPreallocatedArrayElements));
  __ jmp(&fill_array);
  __ bind(&not_empty_2);
  // For non-empty JSArrays the length of the FixedArray and the JSArray is
  // the same.
  __ movq(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  __ bind(&fill_array);
  if (fill_with_hole) {
    Label loop, entry;
    __ Move(scratch, Factory::the_hole_value());
    __ lea(elements_array, Operand(elements_array,
                                   FixedArray::kHeaderSize - kHeapObjectTag));
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(elements_array, 0), scratch);
    __ addq(elements_array, Immediate(kPointerSize));
    __ bind(&entry);
    __ cmpq(elements_array, elements_array_end);
    __ j(below, &loop);
  }
}


// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code the runtime is called.
// This function assumes the following state:
//   rdi: constructor (built-in Array function)
//   rax: argc
//   rsp[0]: return address
//   rsp[8]: last argument
// This function is used for both construct and normal calls of Array. The
// only difference between handling a construct call and a normal call is that
// for a construct call the constructor function in rdi needs to be preserved
// for entering the generic code. In both cases argc in rax needs to be
// preserved. Both registers are preserved by this code so no need to
// differentiate between a construct call and a normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label* call_generic_code) {
  Label argc_one_or_more, argc_two_or_more;

  // Check for array construction with zero arguments.
  __ testq(rax, rax);
  __ j(not_zero, &argc_one_or_more);

  // Handle construction of an empty array.
  AllocateEmptyJSArray(masm,
                       rdi,
                       rbx,
                       rcx,
                       rdx,
                       r8,
                       kPreallocatedArrayElements,
                       call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);
  __ movq(rax, rbx);
  __ ret(kPointerSize);

  // Check for one argument. Bail out if the argument is not a smi or if it
  // is negative.
  __ bind(&argc_one_or_more);
  __ cmpq(rax, Immediate(1));
  __ j(not_equal, &argc_two_or_more);
  __ movq(rdx, Operand(rsp, kPointerSize));  // Get the argument from the stack.
  __ JumpUnlessNonNegativeSmi(rdx, call_generic_code);

  // Handle construction of an empty array of a certain size. Bail out if size
  // is too large to actually allocate an elements array.
  __ SmiCompare(rdx, Smi::FromInt(JSObject::kInitialMaxFastElementArray));
  __ j(greater_equal, call_generic_code);

  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0]: return address
  // rsp[8]: argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  true,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);
  __ movq(rax, rbx);
  __ ret(2 * kPointerSize);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ movq(rdx, rax);
  __ Integer32ToSmi(rdx, rdx);  // Convert argc to a smi.
  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0]: return address
  // rsp[8]: last argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  false,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);

  // rax: argc
  // rbx: JSArray
  // rcx: elements_array
  // r8: elements_array_end (untagged)
  // rsp[0]: return address
  // rsp[8]: last argument

  // Location of the last argument.
  __ lea(r9, Operand(rsp, kPointerSize));

  // Location of the first array element (parameter fill_with_hole to
  // AllocateJSArray is false, so the FixedArray is returned in rcx).
  __ lea(rdx, Operand(rcx, FixedArray::kHeaderSize - kHeapObjectTag));

  // rax: argc
  // rbx: JSArray
  // rdx: location of the first array element
  // r9: location of the last argument
  // rsp[0]: return address
  // rsp[8]: last argument
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(r9, rcx, times_pointer_size, 0));
  __ movq(Operand(rdx, 0), kScratchRegister);
  __ addq(rdx, Immediate(kPointerSize));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Remove caller arguments from the stack and return.
  // rax: argc
  // rbx: JSArray
  // rsp[0]: return address
  // rsp[8]: last argument
  __ pop(rcx);
  __ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
  __ push(rcx);
  __ movq(rax, rbx);
  __ ret(0);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code in case the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);
  Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
  Handle<Code> array_code(code);
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin Array function,
    // which always has a map.
    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rbx);
    __ cmpq(rdi, rbx);
    __ Check(equal, "Unexpected Array function");
    // Initial map for the builtin Array function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
  Handle<Code> generic_construct_stub(code);
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // TODO(849): implement custom construct stub.
  // Generate a copy of the generic stub for now.
  Generate_JSConstructStubGeneric(masm);
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(rbp);
  __ movq(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ push(rdi);

  // Preserve the number of arguments on the stack. Must preserve both
  // rax and rbx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(rcx, rax);
  __ push(rcx);
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movq(rsp, rbp);
  __ pop(rbp);

  // Remove caller arguments from the stack.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdx : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  __ IncrementCounter(&Counters::arguments_adaptors, 1);

  Label enough, too_few;
  __ cmpq(rax, rbx);
  __ j(less, &too_few);
  __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ movq(rcx, Immediate(-1));  // account for receiver

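    // rax now points at the receiver in the caller's frame; walk it downwards
    // while rcx counts the copied slots up to the expected count in rbx.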
    Label copy;
    __ bind(&copy);
    __ incq(rcx);
    __ push(Operand(rax, 0));
    __ subq(rax, Immediate(kPointerSize));
    __ cmpq(rcx, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ movq(rcx, Immediate(-1));  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(rcx);
    __ push(Operand(rdi, 0));
    __ subq(rdi, Immediate(kPointerSize));
    __ cmpq(rcx, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incq(rcx);
    __ push(kScratchRegister);
    __ cmpq(rcx, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  __ int3();
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64