blob: b545876e21cda738e2ad0bbdf517f144734793c9 [file] [log] [blame]
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001// Copyright 2011 the V8 project authors. All rights reserved.
Steve Blocka7e24c12009-10-30 11:49:00 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
Leon Clarkef7060e22010-06-03 12:02:55 +010029
30#if defined(V8_TARGET_ARCH_X64)
31
Steve Blocka7e24c12009-10-30 11:49:00 +000032#include "codegen-inl.h"
Ben Murdochb0fe1622011-05-05 13:52:32 +010033#include "deoptimizer.h"
34#include "full-codegen.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000035
36namespace v8 {
37namespace internal {
38
Ben Murdochb0fe1622011-05-05 13:52:32 +010039
Steve Blocka7e24c12009-10-30 11:49:00 +000040#define __ ACCESS_MASM(masm)
41
Steve Blocka7e24c12009-10-30 11:49:00 +000042
Leon Clarkee46be812010-01-19 14:06:41 +000043void Builtins::Generate_Adaptor(MacroAssembler* masm,
44 CFunctionId id,
45 BuiltinExtraArguments extra_args) {
46 // ----------- S t a t e -------------
47 // -- rax : number of arguments excluding receiver
48 // -- rdi : called function (only guaranteed when
49 // extra_args requires it)
50 // -- rsi : context
51 // -- rsp[0] : return address
52 // -- rsp[8] : last argument
53 // -- ...
54 // -- rsp[8 * argc] : first argument (argc == rax)
55 // -- rsp[8 * (argc +1)] : receiver
56 // -----------------------------------
57
58 // Insert extra arguments.
59 int num_extra_args = 0;
60 if (extra_args == NEEDS_CALLED_FUNCTION) {
61 num_extra_args = 1;
62 __ pop(kScratchRegister); // Save return address.
63 __ push(rdi);
64 __ push(kScratchRegister); // Restore return address.
65 } else {
66 ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
67 }
68
Steve Block6ded16b2010-05-10 14:33:55 +010069 // JumpToExternalReference expects rax to contain the number of arguments
Leon Clarkee46be812010-01-19 14:06:41 +000070 // including the receiver and the extra arguments.
71 __ addq(rax, Immediate(num_extra_args + 1));
Steve Block6ded16b2010-05-10 14:33:55 +010072 __ JumpToExternalReference(ExternalReference(id), 1);
Steve Blocka7e24c12009-10-30 11:49:00 +000073}
74
75
Steve Blocka7e24c12009-10-30 11:49:00 +000076void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
77 // ----------- S t a t e -------------
78 // -- rax: number of arguments
79 // -- rdi: constructor function
80 // -----------------------------------
81
82 Label non_function_call;
83 // Check that function is not a smi.
84 __ JumpIfSmi(rdi, &non_function_call);
85 // Check that function is a JSFunction.
86 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
87 __ j(not_equal, &non_function_call);
88
89 // Jump to the function-specific construct stub.
90 __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
91 __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset));
92 __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize));
93 __ jmp(rbx);
94
Kristian Monsen50ef84f2010-07-29 15:18:00 +010095 // rdi: called object
96 // rax: number of arguments
Steve Blocka7e24c12009-10-30 11:49:00 +000097 __ bind(&non_function_call);
Andrei Popescu402d9372010-02-26 13:31:12 +000098 // Set expected number of arguments to zero (not changing rax).
Steve Blocka7e24c12009-10-30 11:49:00 +000099 __ movq(rbx, Immediate(0));
100 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
101 __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
102 RelocInfo::CODE_TARGET);
103}
104
105
Leon Clarkee46be812010-01-19 14:06:41 +0000106static void Generate_JSConstructStubHelper(MacroAssembler* masm,
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100107 bool is_api_function,
108 bool count_constructions) {
109 // Should never count constructions for api objects.
110 ASSERT(!is_api_function || !count_constructions);
111
Steve Blocka7e24c12009-10-30 11:49:00 +0000112 // Enter a construct frame.
113 __ EnterConstructFrame();
114
115 // Store a smi-tagged arguments count on the stack.
116 __ Integer32ToSmi(rax, rax);
117 __ push(rax);
118
119 // Push the function to invoke on the stack.
120 __ push(rdi);
121
122 // Try to allocate the object without transitioning into C code. If any of the
123 // preconditions is not met, the code bails out to the runtime call.
124 Label rt_call, allocated;
125 if (FLAG_inline_new) {
126 Label undo_allocation;
127
128#ifdef ENABLE_DEBUGGER_SUPPORT
129 ExternalReference debug_step_in_fp =
130 ExternalReference::debug_step_in_fp_address();
131 __ movq(kScratchRegister, debug_step_in_fp);
132 __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
133 __ j(not_equal, &rt_call);
134#endif
135
136 // Verified that the constructor is a JSFunction.
137 // Load the initial map and verify that it is in fact a map.
138 // rdi: constructor
139 __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
140 // Will both indicate a NULL and a Smi
141 ASSERT(kSmiTag == 0);
142 __ JumpIfSmi(rax, &rt_call);
143 // rdi: constructor
144 // rax: initial map (if proven valid below)
145 __ CmpObjectType(rax, MAP_TYPE, rbx);
146 __ j(not_equal, &rt_call);
147
148 // Check that the constructor is not constructing a JSFunction (see comments
149 // in Runtime_NewObject in runtime.cc). In which case the initial map's
150 // instance type would be JS_FUNCTION_TYPE.
151 // rdi: constructor
152 // rax: initial map
153 __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
154 __ j(equal, &rt_call);
155
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100156 if (count_constructions) {
157 Label allocate;
158 // Decrease generous allocation count.
159 __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
160 __ decb(FieldOperand(rcx, SharedFunctionInfo::kConstructionCountOffset));
161 __ j(not_zero, &allocate);
162
163 __ push(rax);
164 __ push(rdi);
165
166 __ push(rdi); // constructor
167 // The call will replace the stub, so the countdown is only done once.
168 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
169
170 __ pop(rdi);
171 __ pop(rax);
172
173 __ bind(&allocate);
174 }
175
Steve Blocka7e24c12009-10-30 11:49:00 +0000176 // Now allocate the JSObject on the heap.
177 __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
178 __ shl(rdi, Immediate(kPointerSizeLog2));
179 // rdi: size of new object
180 __ AllocateInNewSpace(rdi,
181 rbx,
182 rdi,
183 no_reg,
184 &rt_call,
185 NO_ALLOCATION_FLAGS);
186 // Allocated the JSObject, now initialize the fields.
187 // rax: initial map
188 // rbx: JSObject (not HeapObject tagged - the actual address).
189 // rdi: start of next object
190 __ movq(Operand(rbx, JSObject::kMapOffset), rax);
191 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
192 __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
193 __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
194 // Set extra fields in the newly allocated object.
195 // rax: initial map
196 // rbx: JSObject
197 // rdi: start of next object
198 { Label loop, entry;
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100199 // To allow for truncation.
200 if (count_constructions) {
201 __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
202 } else {
203 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
204 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000205 __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
206 __ jmp(&entry);
207 __ bind(&loop);
208 __ movq(Operand(rcx, 0), rdx);
209 __ addq(rcx, Immediate(kPointerSize));
210 __ bind(&entry);
211 __ cmpq(rcx, rdi);
212 __ j(less, &loop);
213 }
214
215 // Add the object tag to make the JSObject real, so that we can continue and
216 // jump into the continuation code at any time from now on. Any failures
217 // need to undo the allocation, so that the heap is in a consistent state
218 // and verifiable.
219 // rax: initial map
220 // rbx: JSObject
221 // rdi: start of next object
222 __ or_(rbx, Immediate(kHeapObjectTag));
223
224 // Check if a non-empty properties array is needed.
225 // Allocate and initialize a FixedArray if it is.
226 // rax: initial map
227 // rbx: JSObject
228 // rdi: start of next object
229 // Calculate total properties described map.
230 __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
231 __ movzxbq(rcx, FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
232 __ addq(rdx, rcx);
233 // Calculate unused properties past the end of the in-object properties.
234 __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
235 __ subq(rdx, rcx);
236 // Done if no extra properties are to be allocated.
237 __ j(zero, &allocated);
238 __ Assert(positive, "Property allocation count failed.");
239
240 // Scale the number of elements by pointer size and add the header for
241 // FixedArrays to the start of the next object calculation from above.
242 // rbx: JSObject
243 // rdi: start of next object (will be start of FixedArray)
244 // rdx: number of elements in properties array
245 __ AllocateInNewSpace(FixedArray::kHeaderSize,
246 times_pointer_size,
247 rdx,
248 rdi,
249 rax,
250 no_reg,
251 &undo_allocation,
252 RESULT_CONTAINS_TOP);
253
254 // Initialize the FixedArray.
255 // rbx: JSObject
256 // rdi: FixedArray
257 // rdx: number of elements
258 // rax: start of next object
259 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100260 __ movq(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map
261 __ Integer32ToSmi(rdx, rdx);
262 __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length
Steve Blocka7e24c12009-10-30 11:49:00 +0000263
264 // Initialize the fields to undefined.
265 // rbx: JSObject
266 // rdi: FixedArray
267 // rax: start of next object
268 // rdx: number of elements
269 { Label loop, entry;
270 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
271 __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
272 __ jmp(&entry);
273 __ bind(&loop);
274 __ movq(Operand(rcx, 0), rdx);
275 __ addq(rcx, Immediate(kPointerSize));
276 __ bind(&entry);
277 __ cmpq(rcx, rax);
278 __ j(below, &loop);
279 }
280
281 // Store the initialized FixedArray into the properties field of
282 // the JSObject
283 // rbx: JSObject
284 // rdi: FixedArray
285 __ or_(rdi, Immediate(kHeapObjectTag)); // add the heap tag
286 __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);
287
288
289 // Continue with JSObject being successfully allocated
290 // rbx: JSObject
291 __ jmp(&allocated);
292
293 // Undo the setting of the new top so that the heap is verifiable. For
294 // example, the map's unused properties potentially do not match the
295 // allocated objects unused properties.
296 // rbx: JSObject (previous new top)
297 __ bind(&undo_allocation);
298 __ UndoAllocationInNewSpace(rbx);
299 }
300
301 // Allocate the new receiver object using the runtime call.
302 // rdi: function (constructor)
303 __ bind(&rt_call);
304 // Must restore rdi (constructor) before calling runtime.
305 __ movq(rdi, Operand(rsp, 0));
306 __ push(rdi);
307 __ CallRuntime(Runtime::kNewObject, 1);
308 __ movq(rbx, rax); // store result in rbx
309
310 // New object allocated.
311 // rbx: newly allocated object
312 __ bind(&allocated);
313 // Retrieve the function from the stack.
314 __ pop(rdi);
315
316 // Retrieve smi-tagged arguments count from the stack.
317 __ movq(rax, Operand(rsp, 0));
318 __ SmiToInteger32(rax, rax);
319
320 // Push the allocated receiver to the stack. We need two copies
321 // because we may have to return the original one and the calling
322 // conventions dictate that the called function pops the receiver.
323 __ push(rbx);
324 __ push(rbx);
325
326 // Setup pointer to last argument.
327 __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
328
329 // Copy arguments and receiver to the expression stack.
330 Label loop, entry;
331 __ movq(rcx, rax);
332 __ jmp(&entry);
333 __ bind(&loop);
334 __ push(Operand(rbx, rcx, times_pointer_size, 0));
335 __ bind(&entry);
336 __ decq(rcx);
337 __ j(greater_equal, &loop);
338
339 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +0000340 if (is_api_function) {
341 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
342 Handle<Code> code = Handle<Code>(
343 Builtins::builtin(Builtins::HandleApiCallConstruct));
344 ParameterCount expected(0);
345 __ InvokeCode(code, expected, expected,
346 RelocInfo::CODE_TARGET, CALL_FUNCTION);
347 } else {
348 ParameterCount actual(rax);
349 __ InvokeFunction(rdi, actual, CALL_FUNCTION);
350 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000351
352 // Restore context from the frame.
353 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
354
355 // If the result is an object (in the ECMA sense), we should get rid
356 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
357 // on page 74.
358 Label use_receiver, exit;
359 // If the result is a smi, it is *not* an object in the ECMA sense.
360 __ JumpIfSmi(rax, &use_receiver);
361
362 // If the type of the result (stored in its map) is less than
363 // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
364 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
365 __ j(above_equal, &exit);
366
367 // Throw away the result of the constructor invocation and use the
368 // on-stack receiver as the result.
369 __ bind(&use_receiver);
370 __ movq(rax, Operand(rsp, 0));
371
372 // Restore the arguments count and leave the construct frame.
373 __ bind(&exit);
374 __ movq(rbx, Operand(rsp, kPointerSize)); // get arguments count
375 __ LeaveConstructFrame();
376
377 // Remove caller arguments from the stack and return.
Steve Blocka7e24c12009-10-30 11:49:00 +0000378 __ pop(rcx);
Steve Block3ce2e202009-11-05 08:53:23 +0000379 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
380 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +0000381 __ push(rcx);
382 __ IncrementCounter(&Counters::constructed_objects, 1);
383 __ ret(0);
384}
385
386
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100387void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
388 Generate_JSConstructStubHelper(masm, false, true);
389}
390
391
Leon Clarkee46be812010-01-19 14:06:41 +0000392void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +0100393 Generate_JSConstructStubHelper(masm, false, false);
Leon Clarkee46be812010-01-19 14:06:41 +0000394}
395
396
// Construct stub for API functions: invokes through HandleApiCallConstruct.
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}
400
401
Steve Blocka7e24c12009-10-30 11:49:00 +0000402static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
403 bool is_construct) {
404 // Expects five C++ function parameters.
405 // - Address entry (ignored)
406 // - JSFunction* function (
407 // - Object* receiver
408 // - int argc
409 // - Object*** argv
410 // (see Handle::Invoke in execution.cc).
411
412 // Platform specific argument handling. After this, the stack contains
413 // an internal frame and the pushed function and receiver, and
414 // register rax and rbx holds the argument count and argument array,
415 // while rdi holds the function pointer and rsi the context.
416#ifdef _WIN64
417 // MSVC parameters in:
418 // rcx : entry (ignored)
419 // rdx : function
420 // r8 : receiver
421 // r9 : argc
422 // [rsp+0x20] : argv
423
424 // Clear the context before we push it when entering the JS frame.
Steve Block9fac8402011-05-12 15:51:54 +0100425 __ Set(rsi, 0);
Steve Blocka7e24c12009-10-30 11:49:00 +0000426 __ EnterInternalFrame();
427
428 // Load the function context into rsi.
429 __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));
430
431 // Push the function and the receiver onto the stack.
432 __ push(rdx);
433 __ push(r8);
434
435 // Load the number of arguments and setup pointer to the arguments.
436 __ movq(rax, r9);
437 // Load the previous frame pointer to access C argument on stack
438 __ movq(kScratchRegister, Operand(rbp, 0));
439 __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
440 // Load the function pointer into rdi.
441 __ movq(rdi, rdx);
Steve Block6ded16b2010-05-10 14:33:55 +0100442#else // _WIN64
Steve Blocka7e24c12009-10-30 11:49:00 +0000443 // GCC parameters in:
444 // rdi : entry (ignored)
445 // rsi : function
446 // rdx : receiver
447 // rcx : argc
448 // r8 : argv
449
450 __ movq(rdi, rsi);
451 // rdi : function
452
453 // Clear the context before we push it when entering the JS frame.
Steve Block9fac8402011-05-12 15:51:54 +0100454 __ Set(rsi, 0);
Steve Blocka7e24c12009-10-30 11:49:00 +0000455 // Enter an internal frame.
456 __ EnterInternalFrame();
457
458 // Push the function and receiver and setup the context.
459 __ push(rdi);
460 __ push(rdx);
461 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
462
463 // Load the number of arguments and setup pointer to the arguments.
464 __ movq(rax, rcx);
465 __ movq(rbx, r8);
466#endif // _WIN64
467
Steve Blocka7e24c12009-10-30 11:49:00 +0000468 // Current stack contents:
469 // [rsp + 2 * kPointerSize ... ]: Internal frame
470 // [rsp + kPointerSize] : function
471 // [rsp] : receiver
472 // Current register contents:
473 // rax : argc
474 // rbx : argv
475 // rsi : context
476 // rdi : function
477
478 // Copy arguments to the stack in a loop.
479 // Register rbx points to array of pointers to handle locations.
480 // Push the values of these handles.
481 Label loop, entry;
Steve Block9fac8402011-05-12 15:51:54 +0100482 __ Set(rcx, 0); // Set loop variable to 0.
Steve Blocka7e24c12009-10-30 11:49:00 +0000483 __ jmp(&entry);
484 __ bind(&loop);
485 __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
486 __ push(Operand(kScratchRegister, 0)); // dereference handle
487 __ addq(rcx, Immediate(1));
488 __ bind(&entry);
489 __ cmpq(rcx, rax);
490 __ j(not_equal, &loop);
491
492 // Invoke the code.
493 if (is_construct) {
494 // Expects rdi to hold function pointer.
495 __ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
496 RelocInfo::CODE_TARGET);
497 } else {
498 ParameterCount actual(rax);
499 // Function must be in rdi.
500 __ InvokeFunction(rdi, actual, CALL_FUNCTION);
501 }
502
503 // Exit the JS frame. Notice that this also removes the empty
504 // context and the function left on the stack by the code
505 // invocation.
506 __ LeaveInternalFrame();
507 // TODO(X64): Is argument correct? Is there a receiver to remove?
508 __ ret(1 * kPointerSize); // remove receiver
509}
510
511
// Entry trampoline for ordinary (non-construct) calls from C++ into JS.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
515
516
// Entry trampoline for construct calls from C++ into JS.
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
520
Iain Merrick75681382010-08-19 15:07:18 +0100521
// Stub installed as the code of not-yet-compiled functions: compiles the
// function (rdi) via the runtime and tail-calls the resulting code (rax).
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push a copy of the function onto the stack.
  __ push(rdi);

  __ push(rdi);  // Function is also the parameter to the runtime call.
  __ CallRuntime(Runtime::kLazyCompile, 1);
  __ pop(rdi);

  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.  The runtime call left the
  // Code object in rax; jump past its header to the first instruction.
  __ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rcx);
}
540
Ben Murdochb0fe1622011-05-05 13:52:32 +0100541
// Stub that triggers recompilation (via Runtime::kLazyRecompile) of the
// function in rdi and tail-calls the resulting code.  Mirrors
// Generate_LazyCompile above.
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push a copy of the function onto the stack.
  __ push(rdi);

  __ push(rdi);  // Function is also the parameter to the runtime call.
  __ CallRuntime(Runtime::kLazyRecompile, 1);

  // Restore function and tear down temporary frame.
  __ pop(rdi);
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.  The runtime call left the
  // Code object in rax; jump past its header to the first instruction.
  __ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rcx);
}
560
561
// Shared body of the deoptimization-notification builtins.  Tells the
// runtime which bailout type occurred, then returns to the full-codegen
// code, dropping the state (and, for TOS_REG, restoring rax) that the
// deoptimizer left on the stack.
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Pass the deoptimization type to the runtime system.
  __ Push(Smi::FromInt(static_cast<int>(type)));

  __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Get the full codegen state from the stack and untag it.
  // rsp[0] is the return address, rsp[8] the smi-tagged state.
  __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));

  // Switch on the state.
  NearLabel not_no_registers, not_tos_rax;
  __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS));
  __ j(not_equal, &not_no_registers);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  // TOS_REG case: reload the saved top-of-stack value into rax.
  __ movq(rax, Operand(rsp, 2 * kPointerSize));
  __ cmpq(rcx, Immediate(FullCodeGenerator::TOS_REG));
  __ j(not_equal, &not_tos_rax);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  __ Abort("no cases left");
}
592
// Notification builtin for eager deoptimization.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
596
597
// Notification builtin for lazy deoptimization.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
601
602
// Notifies the runtime of an on-stack-replacement event, preserving all
// registers around the call.
void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  // For now, we are relying on the fact that Runtime::NotifyOSR
  // doesn't do any garbage collection which allows us to save/restore
  // the registers without worrying about which of them contain
  // pointers. This seems a bit fragile.
  __ Pushad();
  __ EnterInternalFrame();
  __ CallRuntime(Runtime::kNotifyOSR, 0);
  __ LeaveInternalFrame();
  __ Popad();
  __ ret(0);
}
615
616
// Implements Function.prototype.call: normalizes the receiver according to
// the callee's mode (non-strict receivers get wrapped/replaced), shifts the
// arguments down over the old receiver, and dispatches either to the callee
// or to the CALL_NON_FUNCTION builtin.
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Stack Layout:
  //   rsp[0]:   Return address
  //   rsp[1]:   Argument n
  //   rsp[2]:   Argument n-1
  //   ...
  //   rsp[n]:   Argument 1
  //   rsp[n+1]: Receiver (function to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  { Label done;
    __ testq(rax, rax);
    __ j(not_zero, &done);
    // No arguments: slip an undefined in under the return address so there
    // is always a first argument (the future receiver).
    __ pop(rbx);
    __ Push(Factory::undefined_value());
    __ push(rbx);
    __ incq(rax);
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label non_function;
  // The function to call is at position n+1 on the stack.
  __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
  __ JumpIfSmi(rdi, &non_function);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &non_function);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &shift_arguments);

    // Compute the receiver in non-strict mode.
    __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
    __ JumpIfSmi(rbx, &convert_to_object);

    // null and undefined receivers are replaced by the global receiver.
    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    // A receiver that is already a JS object needs no conversion.
    __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
    __ j(below, &convert_to_object);
    __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
    __ j(below_equal, &shift_arguments);

    __ bind(&convert_to_object);
    __ EnterInternalFrame();  // In order to preserve argument count.
    __ Integer32ToSmi(rax, rax);
    __ push(rax);

    __ push(rbx);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ movq(rbx, rax);

    __ pop(rax);
    __ SmiToInteger32(rax, rax);
    __ LeaveInternalFrame();
    // Restore the function to rdi.
    __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
    __ movq(rbx, FieldOperand(rbx, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx);

    __ jmp(&shift_arguments);
  }


  // 3b. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  __ bind(&non_function);
  __ movq(Operand(rsp, rax, times_pointer_size, 0), rdi);
  __ Set(rdi, 0);  // rdi == 0 marks the non-function case for step 5a.

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  __ bind(&shift_arguments);
  { Label loop;
    __ movq(rcx, rax);
    __ bind(&loop);
    __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
    __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
    __ decq(rcx);
    __ j(not_sign, &loop);  // While non-negative (to copy return address).
    __ pop(rbx);  // Discard copy of return address.
    __ decq(rax);  // One fewer argument (first argument is new receiver).
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin.
  { Label function;
    __ testq(rdi, rdi);
    __ j(not_zero, &function);
    __ Set(rbx, 0);
    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register edx without checking arguments.
  __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movsxlq(rbx,
             FieldOperand(rdx,
                          SharedFunctionInfo::kFormalParameterCountOffset));
  __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ cmpq(rax, rbx);
  // Argument count mismatch: go through the arguments adaptor instead.
  __ j(not_equal,
       Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
       RelocInfo::CODE_TARGET);

  ParameterCount expected(0);
  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION);
}
757
758
759void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
760 // Stack at entry:
761 // rsp: return address
762 // rsp+8: arguments
763 // rsp+16: receiver ("this")
764 // rsp+24: function
765 __ EnterInternalFrame();
766 // Stack frame:
767 // rbp: Old base pointer
768 // rbp[1]: return address
769 // rbp[2]: function arguments
770 // rbp[3]: receiver
771 // rbp[4]: function
772 static const int kArgumentsOffset = 2 * kPointerSize;
773 static const int kReceiverOffset = 3 * kPointerSize;
774 static const int kFunctionOffset = 4 * kPointerSize;
775 __ push(Operand(rbp, kFunctionOffset));
776 __ push(Operand(rbp, kArgumentsOffset));
777 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
778
779 // Check the stack for overflow. We are not trying need to catch
780 // interruptions (e.g. debug break and preemption) here, so the "real stack
781 // limit" is checked.
782 Label okay;
783 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
784 __ movq(rcx, rsp);
785 // Make rcx the space we have left. The stack might already be overflowed
786 // here which will cause rcx to become negative.
787 __ subq(rcx, kScratchRegister);
788 // Make rdx the space we need for the array when it is unrolled onto the
789 // stack.
790 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
791 // Check if the arguments will overflow the stack.
792 __ cmpq(rcx, rdx);
793 __ j(greater, &okay); // Signed comparison.
794
795 // Out of stack space.
796 __ push(Operand(rbp, kFunctionOffset));
797 __ push(rax);
798 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
799 __ bind(&okay);
800 // End of stack check.
801
802 // Push current index and limit.
803 const int kLimitOffset =
804 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
805 const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
806 __ push(rax); // limit
807 __ push(Immediate(0)); // index
808
809 // Change context eagerly to get the right global object if
810 // necessary.
811 __ movq(rdi, Operand(rbp, kFunctionOffset));
812 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
813
814 // Compute the receiver.
815 Label call_to_object, use_global_receiver, push_receiver;
816 __ movq(rbx, Operand(rbp, kReceiverOffset));
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100817
818 // Do not transform the receiver for strict mode functions.
819 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
820 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
821 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
822 __ j(not_equal, &push_receiver);
823
824 // Compute the receiver in non-strict mode.
Ben Murdochb0fe1622011-05-05 13:52:32 +0100825 __ JumpIfSmi(rbx, &call_to_object);
826 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
827 __ j(equal, &use_global_receiver);
828 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
829 __ j(equal, &use_global_receiver);
830
831 // If given receiver is already a JavaScript object then there's no
832 // reason for converting it.
833 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
834 __ j(below, &call_to_object);
835 __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
836 __ j(below_equal, &push_receiver);
837
838 // Convert the receiver to an object.
839 __ bind(&call_to_object);
840 __ push(rbx);
841 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
842 __ movq(rbx, rax);
843 __ jmp(&push_receiver);
844
845 // Use the current global receiver object as the receiver.
846 __ bind(&use_global_receiver);
847 const int kGlobalOffset =
848 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
849 __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
850 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
851 __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
852 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
853
854 // Push the receiver.
855 __ bind(&push_receiver);
856 __ push(rbx);
857
858 // Copy all arguments from the array to the stack.
859 Label entry, loop;
860 __ movq(rax, Operand(rbp, kIndexOffset));
861 __ jmp(&entry);
862 __ bind(&loop);
863 __ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments
864
865 // Use inline caching to speed up access to arguments.
866 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
867 __ Call(ic, RelocInfo::CODE_TARGET);
868 // It is important that we do not have a test instruction after the
869 // call. A test instruction after the call is used to indicate that
870 // we have generated an inline version of the keyed load. In this
871 // case, we know that we are not generating a test instruction next.
872
873 // Push the nth argument.
874 __ push(rax);
875
876 // Update the index on the stack and in register rax.
877 __ movq(rax, Operand(rbp, kIndexOffset));
878 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
879 __ movq(Operand(rbp, kIndexOffset), rax);
880
881 __ bind(&entry);
882 __ cmpq(rax, Operand(rbp, kLimitOffset));
883 __ j(not_equal, &loop);
884
885 // Invoke the function.
886 ParameterCount actual(rax);
887 __ SmiToInteger32(rax, rax);
888 __ movq(rdi, Operand(rbp, kFunctionOffset));
889 __ InvokeFunction(rdi, actual, CALL_FUNCTION);
890
891 __ LeaveInternalFrame();
892 __ ret(3 * kPointerSize); // remove function, receiver, and arguments
893}
894
895
// Number of elements to pre-allocate in the backing store when an empty
// array (length 0) is constructed; keeps later element stores from having
// to grow the store immediately. Must stay <= kLoopUnfoldLimit in
// AllocateEmptyJSArray (asserted there).
static const int kPreallocatedArrayElements = 4;
898
899
// Allocate an empty JSArray. The allocated array is put into the result
// register. If the parameter initial_capacity is larger than zero an elements
// backing store is allocated with this size and filled with the hole values.
// Otherwise the elements backing store is set to the empty FixedArray.
//
// On exit, result holds a tagged pointer (allocated with TAG_OBJECT).
// scratch1, scratch2 and scratch3 are clobbered. Jumps to gc_required if
// the new-space allocation fails.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 int initial_capacity,
                                 Label* gc_required) {
  ASSERT(initial_capacity >= 0);

  // Load the initial map from the array function.
  __ movq(scratch1, FieldOperand(array_function,
                                 JSFunction::kPrototypeOrInitialMapOffset));

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements. The allocation is done in one chunk so the elements
  // array immediately follows the JSArray object.
  int size = JSArray::kSize;
  if (initial_capacity > 0) {
    size += FixedArray::SizeFor(initial_capacity);
  }
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
  __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
          Factory::empty_fixed_array());
  // Field JSArray::kElementsOffset is initialized later.
  // The JSArray length is 0 regardless of initial_capacity; the capacity is
  // only reflected in the FixedArray length below.
  __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));

  // If no storage is requested for the elements array just set the empty
  // fixed array.
  if (initial_capacity == 0) {
    __ Move(FieldOperand(result, JSArray::kElementsOffset),
            Factory::empty_fixed_array());
    return;
  }

  // Calculate the location of the elements array and set elements array member
  // of the JSArray. scratch1 keeps the tag bit since result is tagged.
  // result: JSObject
  // scratch2: start of next object
  __ lea(scratch1, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), scratch1);

  // Initialize the FixedArray and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array
  // scratch2: start of next object
  __ Move(FieldOperand(scratch1, HeapObject::kMapOffset),
          Factory::fixed_array_map());
  __ Move(FieldOperand(scratch1, FixedArray::kLengthOffset),
          Smi::FromInt(initial_capacity));

  // Fill the FixedArray with the hole value. Inline the code if short.
  // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
  static const int kLoopUnfoldLimit = 4;
  ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
  __ Move(scratch3, Factory::the_hole_value());
  if (initial_capacity <= kLoopUnfoldLimit) {
    // Use a scratch register here to have only one reloc info when unfolding
    // the loop.
    for (int i = 0; i < initial_capacity; i++) {
      __ movq(FieldOperand(scratch1,
                           FixedArray::kHeaderSize + i * kPointerSize),
              scratch3);
    }
  } else {
    // NOTE(review): the only visible caller passes kPreallocatedArrayElements
    // (<= kLoopUnfoldLimit), so this branch appears unreachable here. If it
    // is ever exercised, note that scratch1 is still a tagged pointer at this
    // point while the loop uses untagged Operand addressing — verify before
    // enabling larger capacities.
    Label loop, entry;
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(scratch1, 0), scratch3);
    __ addq(scratch1, Immediate(kPointerSize));
    __ bind(&entry);
    __ cmpq(scratch1, scratch2);
    __ j(below, &loop);
  }
}
991
992
// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register and beginning and end of the FixedArray elements
// storage is put into registers elements_array and elements_array_end (see
// below for when that is not the case). If the parameter fill_with_hole is
// true the allocated elements backing store is filled with the hole values
// otherwise it is left uninitialized. When the backing store is filled the
// register elements_array is scratched.
//
// scratch and kScratchRegister are clobbered. Jumps to gc_required if the
// new-space allocation fails. array_function and array_size are preserved.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function, // Array function.
                            Register array_size, // As a smi.
                            Register result,
                            Register elements_array,
                            Register elements_array_end,
                            Register scratch,
                            bool fill_with_hole,
                            Label* gc_required) {
  Label not_empty, allocated;

  // Load the initial map from the array function.
  __ movq(elements_array,
          FieldOperand(array_function,
                       JSFunction::kPrototypeOrInitialMapOffset));

  // Check whether an empty sized array is requested.
  __ testq(array_size, array_size);
  __ j(not_zero, &not_empty);

  // If an empty array is requested allocate a small elements array anyway. This
  // keeps the code below free of special casing for the empty array.
  int size = JSArray::kSize + FixedArray::SizeFor(kPreallocatedArrayElements);
  __ AllocateInNewSpace(size,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);
  __ jmp(&allocated);

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested elements. The dynamic part of the size comes from array_size
  // converted to an index (scaled by the pointer size).
  __ bind(&not_empty);
  SmiIndex index =
      masm->SmiToIndex(kScratchRegister, array_size, kPointerSizeLog2);
  __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
                        index.scale,
                        index.reg,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array: initial map
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ bind(&allocated);
  __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
  __ Move(elements_array, Factory::empty_fixed_array());
  __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
  // Field JSArray::kElementsOffset is initialized later.
  // The JSArray length equals the requested size (already a smi).
  __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ lea(elements_array, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), elements_array);

  // Initialize the fixed array. FixedArray length is stored as a smi.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
          Factory::fixed_array_map());
  Label not_empty_2, fill_array;
  __ SmiTest(array_size);
  __ j(not_zero, &not_empty_2);
  // Length of the FixedArray is the number of pre-allocated elements even
  // though the actual JSArray has length 0.
  __ Move(FieldOperand(elements_array, FixedArray::kLengthOffset),
          Smi::FromInt(kPreallocatedArrayElements));
  __ jmp(&fill_array);
  __ bind(&not_empty_2);
  // For non-empty JSArrays the length of the FixedArray and the JSArray is the
  // same.
  __ movq(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);

  // Fill the allocated FixedArray with the hole value if requested.
  // elements_array is advanced past the header (untagged) and then scratched
  // by the fill loop, as documented in the function comment.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  __ bind(&fill_array);
  if (fill_with_hole) {
    Label loop, entry;
    __ Move(scratch, Factory::the_hole_value());
    __ lea(elements_array, Operand(elements_array,
                                   FixedArray::kHeaderSize - kHeapObjectTag));
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(elements_array, 0), scratch);
    __ addq(elements_array, Immediate(kPointerSize));
    __ bind(&entry);
    __ cmpq(elements_array, elements_array_end);
    __ j(below, &loop);
  }
}
1107
1108
// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code the runtime is called. This
// function assumes the following state:
//   rdi: constructor (built-in Array function)
//   rax: argc
//   rsp[0]: return address
//   rsp[8]: last argument
// This function is used for both construct and normal calls of Array. The only
// difference between handling a construct call and a normal call is that for a
// construct call the constructor function in rdi needs to be preserved for
// entering the generic code. In both cases argc in rax needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// a construct call and a normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label *call_generic_code) {
  Label argc_one_or_more, argc_two_or_more;

  // Check for array construction with zero arguments.
  __ testq(rax, rax);
  __ j(not_zero, &argc_one_or_more);

  // Handle construction of an empty array: new Array() / Array().
  AllocateEmptyJSArray(masm,
                       rdi,
                       rbx,
                       rcx,
                       rdx,
                       r8,
                       kPreallocatedArrayElements,
                       call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);
  __ movq(rax, rbx);
  // Pop only the return address slot; there were no arguments.
  __ ret(kPointerSize);

  // Check for one argument. Bail out if argument is not smi or if it is
  // negative.
  __ bind(&argc_one_or_more);
  __ cmpq(rax, Immediate(1));
  __ j(not_equal, &argc_two_or_more);
  __ movq(rdx, Operand(rsp, kPointerSize)); // Get the argument from the stack.
  __ JumpUnlessNonNegativeSmi(rdx, call_generic_code);

  // Handle construction of an empty array of a certain size. Bail out if size
  // is too large to actually allocate an elements array.
  __ SmiCompare(rdx, Smi::FromInt(JSObject::kInitialMaxFastElementArray));
  __ j(greater_equal, call_generic_code);

  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0]: return address
  // rsp[8]: argument
  // fill_with_hole is true: the single argument is the length, so the
  // elements must be initialized to holes.
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  true,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);
  __ movq(rax, rbx);
  // Pop return address and the single (length) argument.
  __ ret(2 * kPointerSize);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ movq(rdx, rax);
  __ Integer32ToSmi(rdx, rdx); // Convert argc to a smi.
  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0] : return address
  // rsp[8] : last argument
  // fill_with_hole is false: every element is stored explicitly below.
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  false,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1);

  // rax: argc
  // rbx: JSArray
  // rcx: elements_array
  // r8: elements_array_end (untagged)
  // rsp[0]: return address
  // rsp[8]: last argument

  // Location of the last argument
  __ lea(r9, Operand(rsp, kPointerSize));

  // Location of the first array element (Parameter fill_with_hole to
  // AllocateJSArray is false, so the FixedArray is returned in rcx).
  __ lea(rdx, Operand(rcx, FixedArray::kHeaderSize - kHeapObjectTag));

  // rax: argc
  // rbx: JSArray
  // rdx: location of the first array element
  // r9: location of the last argument
  // rsp[0]: return address
  // rsp[8]: last argument
  // Copy arguments into the elements array. rcx counts down from argc-1 to 0,
  // so r9[rcx*8] walks from the first argument (deepest on the stack) to the
  // last, while rdx advances through the elements in order.
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(r9, rcx, times_pointer_size, 0));
  __ movq(Operand(rdx, 0), kScratchRegister);
  __ addq(rdx, Immediate(kPointerSize));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Remove caller arguments from the stack and return.
  // rax: argc
  // rbx: JSArray
  // rsp[0]: return address
  // rsp[8]: last argument
  __ pop(rcx);
  __ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
  __ push(rcx);
  __ movq(rax, rbx);
  __ ret(0);
}
1237
1238
// Entry point for the Array function when called as a normal function,
// e.g. Array(1, 2, 3). Tries the fast native path (ArrayNativeCode) and
// falls back to the generic ArrayCodeGeneric builtin if that bails out.
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code in case the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);
  Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
  Handle<Code> array_code(code);
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}
1271
1272
// Entry point for the Array function when called as a constructor,
// e.g. new Array(1, 2, 3). Mirrors Generate_ArrayCode but falls back to the
// generic construct stub, which requires the constructor preserved in rdi.
void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argc
  //  -- rdi : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin and internal
    // Array functions which always have a map.
    // Initial map for the builtin Array function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
  Handle<Code> generic_construct_stub(code);
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
1305
1306
// Construct stub for the String function (new String(...)).
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // TODO(849): implement custom construct stub.
  // Generate a copy of the generic stub for now.
  Generate_JSConstructStubGeneric(masm);
}
1312
1313
// Build an arguments adaptor frame: saved rbp, sentinel context slot,
// function (rdi) and the argument count (rax) as a smi. rcx is clobbered;
// rax and rbx are preserved for the copy loops that follow.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(rbp);
  __ movq(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ push(rdi);

  // Preserve the number of arguments on the stack. Must preserve both
  // rax and rbx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(rcx, rax);
  __ push(rcx);
}
1330
1331
// Tear down the arguments adaptor frame built by EnterArgumentsAdaptorFrame
// and drop the actual arguments plus the receiver from the caller's stack.
// Clobbers rbx and rcx; the return address is preserved on top of the stack.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movq(rsp, rbp);
  __ pop(rbp);

  // Remove caller arguments from the stack. The +1 pointer accounts for
  // the receiver below the arguments.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
}
1346
1347
// Trampoline used when the actual argument count differs from the formal
// parameter count of the callee. Re-pushes the receiver and arguments in an
// adaptor frame, padding missing arguments with undefined (too few) or simply
// copying the expected number (enough), then calls the code entry in rdx.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdx : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  __ IncrementCounter(&Counters::arguments_adaptors, 1);

  Label enough, too_few;
  __ cmpq(rax, rbx);
  __ j(less, &too_few);
  __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  { // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments. rax is turned into a
    // pointer to the last actual argument in the caller frame.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ movq(rcx, Immediate(-1)); // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(rcx);
    __ push(Operand(rax, 0));
    __ subq(rax, Immediate(kPointerSize));
    __ cmpq(rcx, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  { // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ movq(rcx, Immediate(-1)); // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(rcx);
    __ push(Operand(rdi, 0));
    __ subq(rdi, Immediate(kPointerSize));
    __ cmpq(rcx, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incq(rcx);
    __ push(kScratchRegister);
    __ cmpq(rcx, rbx);
    __ j(less, &fill);

    // Restore function pointer (rdi was clobbered by the copy loop above).
    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);
}
1427
1428
// Builtin called from loop back-edges in unoptimized code to attempt
// on-stack replacement (OSR) with optimized code. If OSR is not yet allowed
// at the current loop nesting level, a stack guard check is performed
// instead; if compilation fails (-1 result) execution simply resumes in the
// unoptimized code.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Get the loop depth of the stack guard check. This is recorded in
  // a test(rax, depth) instruction right after the call.
  Label stack_check;
  __ movq(rbx, Operand(rsp, 0)); // return address
  __ movzxbq(rbx, Operand(rbx, 1)); // depth

  // Get the loop nesting level at which we allow OSR from the
  // unoptimized code and check if we want to do OSR yet. If not we
  // should perform a stack guard check so we can get interrupts while
  // waiting for on-stack replacement.
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
  __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset));
  __ j(greater, &stack_check);

  // Pass the function to optimize as the argument to the on-stack
  // replacement runtime function.
  __ EnterInternalFrame();
  __ push(rax);
  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  __ LeaveInternalFrame();

  // If the result was -1 it means that we couldn't optimize the
  // function. Just return and continue in the unoptimized version.
  NearLabel skip;
  __ SmiCompare(rax, Smi::FromInt(-1));
  __ j(not_equal, &skip);
  __ ret(0);

  // If we decide not to perform on-stack replacement we perform a
  // stack guard check to enable interrupts.
  __ bind(&stack_check);
  NearLabel ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);

  StackCheckStub stub;
  __ TailCallStub(&stub);
  __ Abort("Unreachable code: returned from tail call.");
  __ bind(&ok);
  __ ret(0);

  __ bind(&skip);
  // Untag the AST id and push it on the stack.
  __ SmiToInteger32(rax, rax);
  __ push(rax);

  // Generate the code for doing the frame-to-frame translation using
  // the deoptimizer infrastructure.
  Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
  generator.Generate();
}
1483
1484
1485#undef __
1486
Steve Blocka7e24c12009-10-30 11:49:00 +00001487} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01001488
1489#endif // V8_TARGET_ARCH_X64