blob: 456d0765b92886269b6dcc12cce9a383b1aa1a0e [file] [log] [blame]
Ben Murdochb0fe1622011-05-05 13:52:32 +01001// Copyright 2010 the V8 project authors. All rights reserved.
Steve Blocka7e24c12009-10-30 11:49:00 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
Leon Clarkef7060e22010-06-03 12:02:55 +010029
30#if defined(V8_TARGET_ARCH_X64)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "codegen-inl.h"
Ben Murdochb0fe1622011-05-05 13:52:32 +010033#include "deoptimizer.h"
34#include "full-codegen.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000035
36namespace v8 {
37namespace internal {
38
Ben Murdochb0fe1622011-05-05 13:52:32 +010039
Steve Blocka7e24c12009-10-30 11:49:00 +000040#define __ ACCESS_MASM(masm)
41
Steve Blocka7e24c12009-10-30 11:49:00 +000042
// Generates the adaptor that routes a JS-visible builtin call into the C++
// builtin function identified by |id|.  Optionally pushes the called
// function as an extra (hidden) argument before tail-calling out to C++.
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : called function (only guaranteed when
  //                           extra_args requires it)
  //  -- rsi                 : context
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument (argc == rax)
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------

  // Insert extra arguments.  The return address sits on top of the stack, so
  // it must be popped, the extra argument pushed, and the return address
  // pushed back to keep the call/ret pairing intact.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ pop(kScratchRegister);  // Save return address.
    __ push(rdi);
    __ push(kScratchRegister);  // Restore return address.
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addq(rax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id), 1);
}
74
75
// Generates the generic [[Construct]] entry point.  Dispatches to the
// callee's function-specific construct stub, or, when the callee is not a
// JSFunction, falls through to CALL_NON_FUNCTION_AS_CONSTRUCTOR via the
// arguments adaptor.
void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  // -----------------------------------

  Label non_function_call;
  // Check that function is not a smi.
  __ JumpIfSmi(rdi, &non_function_call);
  // Check that function is a JSFunction.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &non_function_call);

  // Jump to the function-specific construct stub.  The stub is read from
  // the SharedFunctionInfo; kHeaderSize skips the Code object header to
  // reach the first instruction.
  __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset));
  __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize));
  __ jmp(rbx);

  // rdi: called object
  // rax: number of arguments
  __ bind(&non_function_call);
  // Set expected number of arguments to zero (not changing rax).
  __ movq(rbx, Immediate(0));
  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
          RelocInfo::CODE_TARGET);
}
104
105
// Generates the construct stub body shared by the generic, countdown, and
// API variants.  Tries to allocate the new receiver inline in new space
// (fast path), falling back to Runtime::kNewObject, then copies the
// arguments and invokes the constructor, implementing the ECMA-262 13.2.2
// rule that a non-object return value is replaced by the receiver.
//   is_api_function:     invoke through HandleApiCallConstruct instead of
//                        calling the JS function directly.
//   count_constructions: decrement the shared-info construction countdown
//                        and pre-fill slack fields with one-pointer filler
//                        so the instance size can later be truncated.
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  // Enter a construct frame.
  __ EnterConstructFrame();

  // Store a smi-tagged arguments count on the stack.
  __ Integer32ToSmi(rax, rax);
  __ push(rax);

  // Push the function to invoke on the stack.
  __ push(rdi);

  // Try to allocate the object without transitioning into C code. If any of
  // the preconditions is not met, the code bails out to the runtime call.
  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;

#ifdef ENABLE_DEBUGGER_SUPPORT
    // While the debugger is stepping, always go through the runtime so the
    // debugger can intercept the constructor call.
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address();
    __ movq(kScratchRegister, debug_step_in_fp);
    __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
    __ j(not_equal, &rt_call);
#endif

    // Verified that the constructor is a JSFunction.
    // Load the initial map and verify that it is in fact a map.
    // rdi: constructor
    __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    __ JumpIfSmi(rax, &rt_call);
    // rdi: constructor
    // rax: initial map (if proven valid below)
    __ CmpObjectType(rax, MAP_TYPE, rbx);
    __ j(not_equal, &rt_call);

    // Check that the constructor is not constructing a JSFunction (see
    // comments in Runtime_NewObject in runtime.cc). In which case the
    // initial map's instance type would be JS_FUNCTION_TYPE.
    // rdi: constructor
    // rax: initial map
    __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
    __ j(equal, &rt_call);

    if (count_constructions) {
      Label allocate;
      // Decrease generous allocation count.
      __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      __ decb(FieldOperand(rcx,
                           SharedFunctionInfo::kConstructionCountOffset));
      __ j(not_zero, &allocate);

      // Countdown hit zero: preserve map and constructor across the call.
      __ push(rax);
      __ push(rdi);

      __ push(rdi);  // constructor
      // The call will replace the stub, so the countdown is only done once.
      __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

      __ pop(rdi);
      __ pop(rax);

      __ bind(&allocate);
    }

    // Now allocate the JSObject on the heap.
    __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
    __ shl(rdi, Immediate(kPointerSizeLog2));
    // rdi: size of new object
    __ AllocateInNewSpace(rdi,
                          rbx,
                          rdi,
                          no_reg,
                          &rt_call,
                          NO_ALLOCATION_FLAGS);
    // Allocated the JSObject, now initialize the fields.
    // rax: initial map
    // rbx: JSObject (not HeapObject tagged - the actual address).
    // rdi: start of next object
    __ movq(Operand(rbx, JSObject::kMapOffset), rax);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
    __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
    // Set extra fields in the newly allocated object.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    { Label loop, entry;
      // To allow for truncation: countdown objects get one-pointer filler
      // in the slack slots instead of undefined.
      if (count_constructions) {
        __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
      } else {
        __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      }
      __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ jmp(&entry);
      __ bind(&loop);
      __ movq(Operand(rcx, 0), rdx);
      __ addq(rcx, Immediate(kPointerSize));
      __ bind(&entry);
      __ cmpq(rcx, rdi);
      __ j(less, &loop);
    }

    // Add the object tag to make the JSObject real, so that we can continue
    // and jump into the continuation code at any time from now on. Any
    // failures need to undo the allocation, so that the heap is in a
    // consistent state and verifiable.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    __ or_(rbx, Immediate(kHeapObjectTag));

    // Check if a non-empty properties array is needed.
    // Allocate and initialize a FixedArray if it is.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    // Calculate total properties described map.
    __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
    __ movzxbq(rcx, FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
    __ addq(rdx, rcx);
    // Calculate unused properties past the end of the in-object properties.
    __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
    __ subq(rdx, rcx);
    // Done if no extra properties are to be allocated.
    __ j(zero, &allocated);
    __ Assert(positive, "Property allocation count failed.");

    // Scale the number of elements by pointer size and add the header for
    // FixedArrays to the start of the next object calculation from above.
    // rbx: JSObject
    // rdi: start of next object (will be start of FixedArray)
    // rdx: number of elements in properties array
    __ AllocateInNewSpace(FixedArray::kHeaderSize,
                          times_pointer_size,
                          rdx,
                          rdi,
                          rax,
                          no_reg,
                          &undo_allocation,
                          RESULT_CONTAINS_TOP);

    // Initialize the FixedArray.
    // rbx: JSObject
    // rdi: FixedArray
    // rdx: number of elements
    // rax: start of next object
    __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
    __ movq(Operand(rdi, HeapObject::kMapOffset), rcx);  // setup the map
    __ Integer32ToSmi(rdx, rdx);
    __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx);  // and length

    // Initialize the fields to undefined.
    // rbx: JSObject
    // rdi: FixedArray
    // rax: start of next object
    // rdx: number of elements
    { Label loop, entry;
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
      __ jmp(&entry);
      __ bind(&loop);
      __ movq(Operand(rcx, 0), rdx);
      __ addq(rcx, Immediate(kPointerSize));
      __ bind(&entry);
      __ cmpq(rcx, rax);
      __ j(below, &loop);
    }

    // Store the initialized FixedArray into the properties field of
    // the JSObject.
    // rbx: JSObject
    // rdi: FixedArray
    __ or_(rdi, Immediate(kHeapObjectTag));  // add the heap tag
    __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);


    // Continue with JSObject being successfully allocated.
    // rbx: JSObject
    __ jmp(&allocated);

    // Undo the setting of the new top so that the heap is verifiable. For
    // example, the map's unused properties potentially do not match the
    // allocated objects unused properties.
    // rbx: JSObject (previous new top)
    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(rbx);
  }

  // Allocate the new receiver object using the runtime call.
  // rdi: function (constructor)
  __ bind(&rt_call);
  // Must restore rdi (constructor) before calling runtime.
  __ movq(rdi, Operand(rsp, 0));
  __ push(rdi);
  __ CallRuntime(Runtime::kNewObject, 1);
  __ movq(rbx, rax);  // store result in rbx

  // New object allocated.
  // rbx: newly allocated object
  __ bind(&allocated);
  // Retrieve the function from the stack.
  __ pop(rdi);

  // Retrieve smi-tagged arguments count from the stack.
  __ movq(rax, Operand(rsp, 0));
  __ SmiToInteger32(rax, rax);

  // Push the allocated receiver to the stack. We need two copies
  // because we may have to return the original one and the calling
  // conventions dictate that the called function pops the receiver.
  __ push(rbx);
  __ push(rbx);

  // Setup pointer to last argument.
  __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

  // Copy arguments and receiver to the expression stack.
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ push(Operand(rbx, rcx, times_pointer_size, 0));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Call the function.
  if (is_api_function) {
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
    Handle<Code> code = Handle<Code>(
        Builtins::builtin(Builtins::HandleApiCallConstruct));
    ParameterCount expected(0);
    __ InvokeCode(code, expected, expected,
                  RelocInfo::CODE_TARGET, CALL_FUNCTION);
  } else {
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION);
  }

  // Restore context from the frame.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

  // If the result is an object (in the ECMA sense), we should get rid
  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
  // on page 74.
  Label use_receiver, exit;
  // If the result is a smi, it is *not* an object in the ECMA sense.
  __ JumpIfSmi(rax, &use_receiver);

  // If the type of the result (stored in its map) is less than
  // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
  __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(above_equal, &exit);

  // Throw away the result of the constructor invocation and use the
  // on-stack receiver as the result.
  __ bind(&use_receiver);
  __ movq(rax, Operand(rsp, 0));

  // Restore the arguments count and leave the construct frame.
  __ bind(&exit);
  __ movq(rbx, Operand(rsp, kPointerSize));  // get arguments count
  __ LeaveConstructFrame();

  // Remove caller arguments from the stack and return.  The extra
  // kPointerSize accounts for the receiver slot below the arguments.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
  __ IncrementCounter(&Counters::constructed_objects, 1);
  __ ret(0);
}
385
386
// Construct stub variant that counts down the shared function info's
// construction counter so unused in-object slack can later be reclaimed.
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}
390
391
// Generic construct stub: plain JS constructor, no construction counting.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}
395
396
// Construct stub for API functions: invokes through HandleApiCallConstruct.
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}
400
401
// Generates the trampoline that moves the C++ entry-stub parameters into
// the JS calling convention (rax = argc, rbx = argv, rdi = function,
// rsi = context), pushes the arguments, and invokes the function either as
// a call or as a construct call.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Platform specific argument handling. After this, the stack contains
  // an internal frame and the pushed function and receiver, and
  // register rax and rbx holds the argument count and argument array,
  // while rdi holds the function pointer and rsi the context.
#ifdef _WIN64
  // MSVC parameters in:
  // rcx : entry (ignored)
  // rdx : function
  // r8 : receiver
  // r9 : argc
  // [rsp+0x20] : argv

  // Clear the context before we push it when entering the JS frame.
  __ xor_(rsi, rsi);
  __ EnterInternalFrame();

  // Load the function context into rsi.
  __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

  // Push the function and the receiver onto the stack.
  __ push(rdx);
  __ push(r8);

  // Load the number of arguments and setup pointer to the arguments.
  __ movq(rax, r9);
  // Load the previous frame pointer to access C argument on stack.
  __ movq(kScratchRegister, Operand(rbp, 0));
  __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
  // Load the function pointer into rdi.
  __ movq(rdi, rdx);
#else  // _WIN64
  // GCC parameters in:
  // rdi : entry (ignored)
  // rsi : function
  // rdx : receiver
  // rcx : argc
  // r8 : argv

  __ movq(rdi, rsi);
  // rdi : function

  // Clear the context before we push it when entering the JS frame.
  __ xor_(rsi, rsi);
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push the function and receiver and setup the context.
  __ push(rdi);
  __ push(rdx);
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Load the number of arguments and setup pointer to the arguments.
  __ movq(rax, rcx);
  __ movq(rbx, r8);
#endif  // _WIN64

  // Current stack contents:
  // [rsp + 2 * kPointerSize ... ]: Internal frame
  // [rsp + kPointerSize]         : function
  // [rsp]                        : receiver
  // Current register contents:
  // rax : argc
  // rbx : argv
  // rsi : context
  // rdi : function

  // Copy arguments to the stack in a loop.
  // Register rbx points to array of pointers to handle locations.
  // Push the values of these handles.
  Label loop, entry;
  __ xor_(rcx, rcx);  // Set loop variable to 0.
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
  __ push(Operand(kScratchRegister, 0));  // dereference handle
  __ addq(rcx, Immediate(1));
  __ bind(&entry);
  __ cmpq(rcx, rax);
  __ j(not_equal, &loop);

  // Invoke the code.
  if (is_construct) {
    // Expects rdi to hold function pointer.
    __ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
            RelocInfo::CODE_TARGET);
  } else {
    ParameterCount actual(rax);
    // Function must be in rdi.
    __ InvokeFunction(rdi, actual, CALL_FUNCTION);
  }

  // Exit the JS frame. Notice that this also removes the empty
  // context and the function left on the stack by the code
  // invocation.
  __ LeaveInternalFrame();
  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // remove receiver
}
510
511
// Entry trampoline for ordinary (non-construct) calls from C++ into JS.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
515
516
// Entry trampoline for construct calls from C++ into JS.
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
520
Iain Merrick75681382010-08-19 15:07:18 +0100521
// Stub installed as the code of not-yet-compiled functions.  Calls
// Runtime::kLazyCompile to compile the function (passed in rdi), then
// tail-calls the freshly compiled code returned in rax.
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push a copy of the function onto the stack.
  __ push(rdi);

  __ push(rdi);  // Function is also the parameter to the runtime call.
  __ CallRuntime(Runtime::kLazyCompile, 1);
  __ pop(rdi);

  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
  __ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rcx);
}
540
Ben Murdochb0fe1622011-05-05 13:52:32 +0100541
// Stub that triggers recompilation (e.g. for optimization) of the function
// in rdi via Runtime::kLazyRecompile, then tail-calls the resulting code.
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push a copy of the function onto the stack.
  __ push(rdi);

  __ push(rdi);  // Function is also the parameter to the runtime call.
  __ CallRuntime(Runtime::kLazyRecompile, 1);

  // Restore function and tear down temporary frame.
  __ pop(rdi);
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
  __ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rcx);
}
560
561
// Deoptimization notification stub.  Not yet implemented on x64: emits an
// int3 breakpoint so reaching it traps immediately.  |type| is currently
// unused.
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  __ int3();
}
566
// Builtin entered after an eager deoptimization bailout.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
570
571
572void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
573 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
574}
575
576
// On-stack-replacement notification stub.  Not yet implemented on x64:
// emits an int3 breakpoint so reaching it traps immediately.
void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  __ int3();
}
580
581
// Implements Function.prototype.call: patches the receiver according to the
// callee's expectations (converting primitives to objects, substituting the
// global receiver for null/undefined), shifts the arguments down one slot,
// and tail-calls the target function or CALL_NON_FUNCTION.
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Stack Layout:
  //   rsp[0]:   Return address
  //   rsp[1]:   Argument n
  //   rsp[2]:   Argument n-1
  //   ...
  //   rsp[n]:   Argument 1
  //   rsp[n+1]: Receiver (function to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  { Label done;
    __ testq(rax, rax);
    __ j(not_zero, &done);
    __ pop(rbx);
    __ Push(Factory::undefined_value());
    __ push(rbx);
    __ incq(rax);
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label non_function;
  // The function to call is at position n+1 on the stack.
  __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
  __ JumpIfSmi(rdi, &non_function);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &non_function);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
    __ JumpIfSmi(rbx, &convert_to_object);

    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    // A receiver that is already a JS object needs no patching.
    __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
    __ j(below, &convert_to_object);
    __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
    __ j(below_equal, &shift_arguments);

    __ bind(&convert_to_object);
    __ EnterInternalFrame();  // In order to preserve argument count.
    __ Integer32ToSmi(rax, rax);
    __ push(rax);

    __ push(rbx);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ movq(rbx, rax);

    __ pop(rax);
    __ SmiToInteger32(rax, rax);
    __ LeaveInternalFrame();
    // Restore the function to rdi.
    __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
    __ movq(rbx, FieldOperand(rbx, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx);

    __ jmp(&shift_arguments);
  }


  // 3b. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  __ bind(&non_function);
  __ movq(Operand(rsp, rax, times_pointer_size, 0), rdi);
  __ xor_(rdi, rdi);  // rdi == 0 marks the non-function case below.

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  __ bind(&shift_arguments);
  { Label loop;
    __ movq(rcx, rax);
    __ bind(&loop);
    __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
    __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
    __ decq(rcx);
    __ j(not_sign, &loop);  // While non-negative (to copy return address).
    __ pop(rbx);  // Discard copy of return address.
    __ decq(rax);  // One fewer argument (first argument is new receiver).
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin.
  { Label function;
    __ testq(rdi, rdi);
    __ j(not_zero, &function);
    __ xor_(rbx, rbx);
    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register edx without checking arguments.
  __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movsxlq(rbx,
             FieldOperand(rdx,
                          SharedFunctionInfo::kFormalParameterCountOffset));
  __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ cmpq(rax, rbx);
  __ j(not_equal,
       Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
       RelocInfo::CODE_TARGET);

  ParameterCount expected(0);
  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION);
}
715
716
// Implements Function.prototype.apply: validates the arguments array
// (APPLY_PREPARE), checks for stack overflow, computes the receiver with
// the same rules as Function.prototype.call, unrolls the array onto the
// stack element by element via the keyed-load IC, and invokes the function.
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  // Stack at entry:
  //    rsp: return address
  //  rsp+8: arguments
  // rsp+16: receiver ("this")
  // rsp+24: function
  __ EnterInternalFrame();
  // Stack frame:
  //    rbp: Old base pointer
  // rbp[1]: return address
  // rbp[2]: function arguments
  // rbp[3]: receiver
  // rbp[4]: function
  static const int kArgumentsOffset = 2 * kPointerSize;
  static const int kReceiverOffset = 3 * kPointerSize;
  static const int kFunctionOffset = 4 * kPointerSize;
  __ push(Operand(rbp, kFunctionOffset));
  __ push(Operand(rbp, kArgumentsOffset));
  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real
  // stack limit" is checked.
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movq(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subq(rcx, kScratchRegister);
  // Make rdx the space we need for the array when it is unrolled onto the
  // stack.
  __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
  // Check if the arguments will overflow the stack.
  __ cmpq(rcx, rdx);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ push(Operand(rbp, kFunctionOffset));
  __ push(rax);
  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
  __ bind(&okay);
  // End of stack check.

  // Push current index and limit.
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
  const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
  __ push(rax);  // limit
  __ push(Immediate(0));  // index

  // Change context eagerly to get the right global object if
  // necessary.
  __ movq(rdi, Operand(rbp, kFunctionOffset));
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Compute the receiver.
  Label call_to_object, use_global_receiver, push_receiver;
  __ movq(rbx, Operand(rbp, kReceiverOffset));
  __ JumpIfSmi(rbx, &call_to_object);
  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
  __ j(equal, &use_global_receiver);
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &use_global_receiver);

  // If given receiver is already a JavaScript object then there's no
  // reason for converting it.
  __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(below, &call_to_object);
  __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
  __ j(below_equal, &push_receiver);

  // Convert the receiver to an object.
  __ bind(&call_to_object);
  __ push(rbx);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ movq(rbx, rax);
  __ jmp(&push_receiver);

  // Use the current global receiver object as the receiver.
  __ bind(&use_global_receiver);
  const int kGlobalOffset =
      Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
  __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

  // Push the receiver.
  __ bind(&push_receiver);
  __ push(rbx);

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ movq(rax, Operand(rbp, kIndexOffset));
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(rdx, Operand(rbp, kArgumentsOffset));  // load arguments

  // Use inline caching to speed up access to arguments.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  // It is important that we do not have a test instruction after the
  // call. A test instruction after the call is used to indicate that
  // we have generated an inline version of the keyed load. In this
  // case, we know that we are not generating a test instruction next.

  // Push the nth argument.
  __ push(rax);

  // Update the index on the stack and in register rax.
  __ movq(rax, Operand(rbp, kIndexOffset));
  __ SmiAddConstant(rax, rax, Smi::FromInt(1));
  __ movq(Operand(rbp, kIndexOffset), rax);

  __ bind(&entry);
  __ cmpq(rax, Operand(rbp, kLimitOffset));
  __ j(not_equal, &loop);

  // Invoke the function.
  ParameterCount actual(rax);
  __ SmiToInteger32(rax, rax);
  __ movq(rdi, Operand(rbp, kFunctionOffset));
  __ InvokeFunction(rdi, actual, CALL_FUNCTION);

  __ LeaveInternalFrame();
  __ ret(3 * kPointerSize);  // remove function, receiver, and arguments
}
844
845
// Number of empty elements to allocate for an empty array.
static const int kPreallocatedArrayElements = 4;
848
849
// Allocate an empty JSArray. The allocated array is put into the result
// register. If the parameter initial_capacity is larger than zero an elements
// backing store is allocated with this size and filled with the hole values.
// Otherwise the elements backing store is set to the empty FixedArray.
// Clobbers scratch1, scratch2 and scratch3; jumps to gc_required if the
// new-space allocation fails.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 int initial_capacity,
                                 Label* gc_required) {
  ASSERT(initial_capacity >= 0);

  // Load the initial map from the array function.
  __ movq(scratch1, FieldOperand(array_function,
                                 JSFunction::kPrototypeOrInitialMapOffset));

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements. Both are carved out of one contiguous allocation.
  int size = JSArray::kSize;
  if (initial_capacity > 0) {
    size += FixedArray::SizeFor(initial_capacity);
  }
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject (tagged)
  // scratch1: initial map
  // scratch2: start of next object
  __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
  __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
          Factory::empty_fixed_array());
  // Field JSArray::kElementsOffset is initialized later.
  __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));

  // If no storage is requested for the elements array just set the empty
  // fixed array.
  if (initial_capacity == 0) {
    __ Move(FieldOperand(result, JSArray::kElementsOffset),
            Factory::empty_fixed_array());
    return;
  }

  // Calculate the location of the elements array and set elements array member
  // of the JSArray. The elements array starts immediately after the JSArray
  // header; since result is tagged, scratch1 ends up tagged as well.
  // result: JSObject
  // scratch2: start of next object
  __ lea(scratch1, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), scratch1);

  // Initialize the FixedArray and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array (tagged)
  // scratch2: start of next object
  __ Move(FieldOperand(scratch1, HeapObject::kMapOffset),
          Factory::fixed_array_map());
  __ Move(FieldOperand(scratch1, FixedArray::kLengthOffset),
          Smi::FromInt(initial_capacity));

  // Fill the FixedArray with the hole value. Inline the code if short.
  // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
  static const int kLoopUnfoldLimit = 4;
  ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
  __ Move(scratch3, Factory::the_hole_value());
  if (initial_capacity <= kLoopUnfoldLimit) {
    // Use a scratch register here to have only one reloc info when unfolding
    // the loop.
    for (int i = 0; i < initial_capacity; i++) {
      __ movq(FieldOperand(scratch1,
                           FixedArray::kHeaderSize + i * kPointerSize),
              scratch3);
    }
  } else {
    // NOTE(review): this path stores starting at the tagged elements pointer
    // without skipping FixedArray::kHeaderSize (compare the fill loop in
    // AllocateJSArray, which applies kHeaderSize - kHeapObjectTag first).
    // It is currently unreachable because the only caller passes
    // kPreallocatedArrayElements (<= kLoopUnfoldLimit) — verify before
    // raising the limit or capacity.
    Label loop, entry;
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(scratch1, 0), scratch3);
    __ addq(scratch1, Immediate(kPointerSize));
    __ bind(&entry);
    // Fill until the start of the next object is reached.
    __ cmpq(scratch1, scratch2);
    __ j(below, &loop);
  }
}
941
942
// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register and beginning and end of the FixedArray elements
// storage is put into registers elements_array and elements_array_end (see
// below for when that is not the case). If the parameter fill_with_holes is
// true the allocated elements backing store is filled with the hole values
// otherwise it is left uninitialized. When the backing store is filled the
// register elements_array is scratched. Jumps to gc_required if the
// new-space allocation fails.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi.
                            Register result,
                            Register elements_array,
                            Register elements_array_end,
                            Register scratch,
                            bool fill_with_hole,
                            Label* gc_required) {
  Label not_empty, allocated;

  // Load the initial map from the array function.
  __ movq(elements_array,
          FieldOperand(array_function,
                       JSFunction::kPrototypeOrInitialMapOffset));

  // Check whether an empty sized array is requested. A zero smi is a zero
  // word, so a plain register test suffices.
  __ testq(array_size, array_size);
  __ j(not_zero, &not_empty);

  // If an empty array is requested allocate a small elements array anyway. This
  // keeps the code below free of special casing for the empty array.
  int size = JSArray::kSize + FixedArray::SizeFor(kPreallocatedArrayElements);
  __ AllocateInNewSpace(size,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);
  __ jmp(&allocated);

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested elements.
  __ bind(&not_empty);
  // Convert the smi array_size into a scaled index that contributes the
  // variable part of the allocation size.
  SmiIndex index =
      masm->SmiToIndex(kScratchRegister, array_size, kPointerSizeLog2);
  __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
                        index.scale,
                        index.reg,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject (tagged)
  // elements_array: initial map
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ bind(&allocated);
  __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
  __ Move(elements_array, Factory::empty_fixed_array());
  __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
  // Field JSArray::kElementsOffset is initialized later.
  // Note: the JSArray length is the requested size, which may be zero even
  // though the backing store below is pre-allocated.
  __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);

  // Calculate the location of the elements array and set elements array member
  // of the JSArray. The elements array starts right after the JSArray header.
  // result: JSObject
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ lea(elements_array, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), elements_array);

  // Initialize the fixed array. FixedArray length is stored as a smi.
  // result: JSObject
  // elements_array: elements array (tagged)
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
          Factory::fixed_array_map());
  Label not_empty_2, fill_array;
  __ SmiTest(array_size);
  __ j(not_zero, &not_empty_2);
  // Length of the FixedArray is the number of pre-allocated elements even
  // though the actual JSArray has length 0.
  __ Move(FieldOperand(elements_array, FixedArray::kLengthOffset),
          Smi::FromInt(kPreallocatedArrayElements));
  __ jmp(&fill_array);
  __ bind(&not_empty_2);
  // For non-empty JSArrays the length of the FixedArray and the JSArray is the
  // same.
  __ movq(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  __ bind(&fill_array);
  if (fill_with_hole) {
    Label loop, entry;
    __ Move(scratch, Factory::the_hole_value());
    // Advance past the FixedArray header and strip the heap-object tag so the
    // stores below hit the first element slot exactly.
    __ lea(elements_array, Operand(elements_array,
                                   FixedArray::kHeaderSize - kHeapObjectTag));
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(elements_array, 0), scratch);
    __ addq(elements_array, Immediate(kPointerSize));
    __ bind(&entry);
    // Fill until the start of the next object is reached.
    __ cmpq(elements_array, elements_array_end);
    __ j(below, &loop);
  }
}
1057
1058
// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code the runtime is called. This
// function assumes the following state:
//   rdi: constructor (built-in Array function)
//   rax: argc
//   rsp[0]: return address
//   rsp[8]: last argument
// This function is used for both construct and normal calls of Array. The only
// difference between handling a construct call and a normal call is that for a
// construct call the constructor function in rdi needs to be preserved for
// entering the generic code. In both cases argc in rax needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// a construct call and a normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label *call_generic_code) {
  Label argc_one_or_more, argc_two_or_more;

  // Check for array construction with zero arguments.
  __ testq(rax, rax);
  __ j(not_zero, &argc_one_or_more);

  // Handle construction of an empty array: new Array().
  AllocateEmptyJSArray(masm,
                       rdi,
                       rbx,
                       rcx,
                       rdx,
                       r8,
                       kPreallocatedArrayElements,
                       call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);
  __ movq(rax, rbx);
  // Pop the single stack slot (the receiver) along with the return.
  __ ret(kPointerSize);

  // Check for one argument. Bail out if argument is not smi or if it is
  // negative.
  __ bind(&argc_one_or_more);
  __ cmpq(rax, Immediate(1));
  __ j(not_equal, &argc_two_or_more);
  __ movq(rdx, Operand(rsp, kPointerSize));  // Get the argument from the stack.
  __ JumpUnlessNonNegativeSmi(rdx, call_generic_code);

  // Handle construction of an empty array of a certain size. Bail out if size
  // is too large to actually allocate an elements array.
  __ SmiCompare(rdx, Smi::FromInt(JSObject::kInitialMaxFastElementArray));
  __ j(greater_equal, call_generic_code);

  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0]: return address
  // rsp[8]: argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  true,  // Fill the elements with holes: length > 0.
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);
  __ movq(rax, rbx);
  // Pop the argument and the receiver.
  __ ret(2 * kPointerSize);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ movq(rdx, rax);
  __ Integer32ToSmi(rdx, rdx);  // Convert argc to a smi.
  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0] : return address
  // rsp[8] : last argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  false,  // No hole fill: every element is stored below.
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);

  // rax: argc
  // rbx: JSArray
  // rcx: elements_array
  // r8: elements_array_end (untagged)
  // rsp[0]: return address
  // rsp[8]: last argument

  // Location of the last argument.
  __ lea(r9, Operand(rsp, kPointerSize));

  // Location of the first array element (parameter fill_with_holes to
  // AllocateJSArray is false, so the FixedArray is returned in rcx).
  __ lea(rdx, Operand(rcx, FixedArray::kHeaderSize - kHeapObjectTag));

  // Copy the arguments into the array. Arguments were pushed first-to-last,
  // so the first argument sits at the highest address: indexing r9 with a
  // descending rcx (argc-1 .. 0) reads first-to-last while rdx advances
  // through the elements in order.
  // rax: argc
  // rbx: JSArray
  // rdx: location of the first array element
  // r9: location of the last argument
  // rsp[0]: return address
  // rsp[8]: last argument
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(r9, rcx, times_pointer_size, 0));
  __ movq(Operand(rdx, 0), kScratchRegister);
  __ addq(rdx, Immediate(kPointerSize));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Remove caller arguments from the stack and return. The return address is
  // popped, the argc arguments plus the receiver are dropped, and the return
  // address is pushed back before returning.
  // rax: argc
  // rbx: JSArray
  // rsp[0]: return address
  // rsp[8]: last argument
  __ pop(rcx);
  __ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
  __ push(rcx);
  __ movq(rax, rbx);
  __ ret(0);
}
1187
1188
// Entry point for the Array function called as a normal function, e.g.
// Array(...). Tries the fast native path first and falls back to the
// generic builtin when the native code bails out.
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  // On success the native code returns directly to the caller; it only falls
  // through to the label below when it bails out.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code in case the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);
  Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
  Handle<Code> array_code(code);
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}
1221
1222
// Entry point for the Array function called as a constructor, e.g.
// new Array(...). Tries the fast native path first and falls back to the
// generic construct stub when the native code bails out.
void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin Array function which
    // does always have a map.
    __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rbx);
    __ cmpq(rdi, rbx);
    __ Check(equal, "Unexpected Array function");
    // Initial map for the builtin Array function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as constructor. rdi
  // (the constructor) and rax (argc) are preserved by ArrayNativeCode, so
  // the generic stub below still sees the state it expects.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
  Handle<Code> generic_construct_stub(code);
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
1258
1259
// Construct stub for the String function. No specialized x64 version exists
// yet, so this simply emits the generic JS construct stub.
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // TODO(849): implement custom construct stub.
  // Generate a copy of the generic stub for now.
  Generate_JSConstructStubGeneric(masm);
}
1265
1266
// Build an arguments adaptor frame: saved rbp, the ARGUMENTS_ADAPTOR context
// sentinel, the function (rdi), and the actual argument count (rax) as a smi.
// The smi count is what LeaveArgumentsAdaptorFrame reads back to drop the
// caller's arguments.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(rbp);
  __ movq(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ push(rdi);

  // Preserve the number of arguments on the stack. Must preserve both
  // rax and rbx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(rcx, rax);
  __ push(rcx);
}
1283
1284
// Tear down the arguments adaptor frame built by EnterArgumentsAdaptorFrame
// and remove the caller's arguments (plus the receiver) from the stack.
// Clobbers rbx and rcx.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movq(rsp, rbp);
  __ pop(rbp);

  // Remove caller arguments from the stack: pop the return address, drop
  // the arguments plus one slot for the receiver, push the return address
  // back.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
}
1299
1300
// Trampoline used when the actual argument count does not match the callee's
// expected count. Builds an adaptor frame, copies the receiver and arguments
// (padding with undefined when too few were passed), calls the code entry,
// and tears the frame down again. When the callee does not adapt arguments
// (sentinel expected count) the code entry is tail-called directly.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdx : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  __ IncrementCounter(&Counters::arguments_adaptors, 1);

  Label enough, too_few;
  __ cmpq(rax, rbx);
  __ j(less, &too_few);
  __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  { // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments. rax is pointed at the
    // receiver slot in the caller's frame and walked downwards while
    // rcx counts the copied slots (-1 accounts for the receiver).
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ movq(rcx, Immediate(-1));  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(rcx);
    __ push(Operand(rax, 0));
    __ subq(rax, Immediate(kPointerSize));
    __ cmpq(rcx, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  { // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments (same walk as above, but only
    // rax slots are available; rdi is used as the cursor here).
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ movq(rcx, Immediate(-1));  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(rcx);
    __ push(Operand(rdi, 0));
    __ subq(rdi, Immediate(kPointerSize));
    __ cmpq(rcx, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incq(rcx);
    __ push(kScratchRegister);
    __ cmpq(rcx, rbx);
    __ j(less, &fill);

    // Restore function pointer (rdi was clobbered as the copy cursor above).
    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);
}
1380
1381
// On-stack replacement entry point. Not implemented for x64 yet: int3 traps
// into the debugger if this builtin is ever reached.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  __ int3();
}
1385
1386
1387#undef __
1388
Steve Blocka7e24c12009-10-30 11:49:00 +00001389} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01001390
1391#endif // V8_TARGET_ARCH_X64