blob: e423ae3a4f6aad6acbf048f974c959fd9718e04c [file] [log] [blame]
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001// Copyright 2011 the V8 project authors. All rights reserved.
Steve Blocka7e24c12009-10-30 11:49:00 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
Leon Clarkef7060e22010-06-03 12:02:55 +010029
30#if defined(V8_TARGET_ARCH_X64)
31
Ben Murdoch8b112d22011-06-08 16:22:53 +010032#include "codegen.h"
Ben Murdochb0fe1622011-05-05 13:52:32 +010033#include "deoptimizer.h"
34#include "full-codegen.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000035
36namespace v8 {
37namespace internal {
38
Ben Murdochb0fe1622011-05-05 13:52:32 +010039
Steve Blocka7e24c12009-10-30 11:49:00 +000040#define __ ACCESS_MASM(masm)
41
Steve Blocka7e24c12009-10-30 11:49:00 +000042
// Trampoline shared by all C++-builtin adaptors: optionally pushes the
// called function as an extra argument, then tail-calls the C function
// identified by |id| through the runtime's external-reference mechanism.
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                : number of arguments excluding receiver
  //  -- rdi                : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- rsi                : context
  //  -- rsp[0]             : return address
  //  -- rsp[8]             : last argument
  //  -- ...
  //  -- rsp[8 * argc]      : first argument (argc == rax)
  //  -- rsp[8 * (argc +1)] : receiver
  // -----------------------------------

  // Insert extra arguments.  The extra argument (the called function) is
  // slipped in *under* the return address, i.e. it becomes the new last
  // argument seen by the callee.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ pop(kScratchRegister);   // Save return address.
    __ push(rdi);               // Extra argument: the called function.
    __ push(kScratchRegister);  // Restore return address.
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addq(rax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
}
74
75
// Generic [[Construct]] entry point: dispatches to the function-specific
// construct stub for real JSFunctions, and to the appropriate builtin
// (via the arguments adaptor) for proxies and non-functions.
void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  // -----------------------------------

  Label slow, non_function_call;
  // Check that function is not a smi.
  __ JumpIfSmi(rdi, &non_function_call);
  // Check that function is a JSFunction.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &slow);

  // Jump to the function-specific construct stub.
  __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset));
  // Skip the Code object header to reach the first instruction.
  __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize));
  __ jmp(rbx);

  // rdi: called object
  // rax: number of arguments
  // rcx: object map (set by CmpObjectType above)
  Label do_call;
  __ bind(&slow);
  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
  __ j(not_equal, &non_function_call);
  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
  __ jmp(&do_call);

  __ bind(&non_function_call);
  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ bind(&do_call);
  // Set expected number of arguments to zero (not changing rax).
  __ Set(rbx, 0);
  __ SetCallKind(rcx, CALL_AS_METHOD);
  // Tail-call through the adaptor so actual/expected argument counts match.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
}
114
115
// Shared body of the construct stubs.  Allocates the receiver (inline when
// FLAG_inline_new allows, otherwise via Runtime::kNewObject), copies the
// arguments into the construct frame, invokes the constructor, and applies
// the ECMA-262 13.2.2 rule for choosing between the returned value and the
// receiver.
//   masm                - assembler to emit into
//   is_api_function     - invoke through HandleApiCallConstruct instead of
//                         a normal function invocation
//   count_constructions - decrement the shared "generous allocation"
//                         countdown and finalize the instance size when it
//                         reaches zero (mutually exclusive with api calls)
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Store a smi-tagged arguments count on the stack.
    __ Integer32ToSmi(rax, rax);
    __ push(rax);

    // Push the function to invoke on the stack.
    __ push(rdi);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;

#ifdef ENABLE_DEBUGGER_SUPPORT
      // While the debugger is stepping, always go through the runtime so the
      // debugger can intercept the constructor call.
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(masm->isolate());
      __ movq(kScratchRegister, debug_step_in_fp);
      __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
      __ j(not_equal, &rt_call);
#endif

      // Verified that the constructor is a JSFunction.
      // Load the initial map and verify that it is in fact a map.
      // rdi: constructor
      __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
      // Will both indicate a NULL and a Smi.
      ASSERT(kSmiTag == 0);
      __ JumpIfSmi(rax, &rt_call);
      // rdi: constructor
      // rax: initial map (if proven valid below)
      __ CmpObjectType(rax, MAP_TYPE, rbx);
      __ j(not_equal, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // rdi: constructor
      // rax: initial map
      __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
      __ j(equal, &rt_call);

      if (count_constructions) {
        Label allocate;
        // Decrease generous allocation count.
        __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
        __ decb(FieldOperand(rcx,
                             SharedFunctionInfo::kConstructionCountOffset));
        __ j(not_zero, &allocate);

        // Preserve the initial map and the constructor across the call.
        __ push(rax);
        __ push(rdi);

        __ push(rdi);  // constructor
        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(rdi);
        __ pop(rax);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
      __ shl(rdi, Immediate(kPointerSizeLog2));
      // rdi: size of new object
      __ AllocateInNewSpace(rdi,
                            rbx,
                            rdi,
                            no_reg,
                            &rt_call,
                            NO_ALLOCATION_FLAGS);
      // Allocated the JSObject, now initialize the fields.
      // rax: initial map
      // rbx: JSObject (not HeapObject tagged - the actual address).
      // rdi: start of next object
      __ movq(Operand(rbx, JSObject::kMapOffset), rax);
      __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
      __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
      __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
      // Set extra fields in the newly allocated object.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      if (count_constructions) {
        // While counting constructions, pre-allocated fields get undefined
        // and the slack beyond them gets a one-pointer filler so the unused
        // space can be reclaimed when the instance size is finalized.
        __ movzxbq(rsi,
                   FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
        __ lea(rsi,
               Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
        // rsi: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ cmpq(rsi, rdi);
          __ Assert(less_equal,
                    "Unexpected number of pre-allocated property fields.");
        }
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);
        __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
      }
      __ InitializeFieldsWithFiller(rcx, rdi, rdx);

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      __ or_(rbx, Immediate(kHeapObjectTag));

      // Check if a non-empty properties array is needed.
      // Allocate and initialize a FixedArray if it is.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      // Calculate the total number of properties described by the map.
      __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
      __ movzxbq(rcx,
                 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
      __ addq(rdx, rcx);
      // Calculate unused properties past the end of the in-object properties.
      __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
      __ subq(rdx, rcx);
      // Done if no extra properties are to be allocated.
      __ j(zero, &allocated);
      __ Assert(positive, "Property allocation count failed.");

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // rbx: JSObject
      // rdi: start of next object (will be start of FixedArray)
      // rdx: number of elements in properties array
      __ AllocateInNewSpace(FixedArray::kHeaderSize,
                            times_pointer_size,
                            rdx,
                            rdi,
                            rax,
                            no_reg,
                            &undo_allocation,
                            RESULT_CONTAINS_TOP);

      // Initialize the FixedArray.
      // rbx: JSObject
      // rdi: FixedArray
      // rdx: number of elements
      // rax: start of next object
      __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
      __ movq(Operand(rdi, HeapObject::kMapOffset), rcx);  // setup the map
      __ Integer32ToSmi(rdx, rdx);
      __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx);  // and length

      // Initialize the fields to undefined.
      // rbx: JSObject
      // rdi: FixedArray
      // rax: start of next object
      // rdx: number of elements
      { Label loop, entry;
        __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
        __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
        __ jmp(&entry);
        __ bind(&loop);
        __ movq(Operand(rcx, 0), rdx);
        __ addq(rcx, Immediate(kPointerSize));
        __ bind(&entry);
        __ cmpq(rcx, rax);
        __ j(below, &loop);
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // rbx: JSObject
      // rdi: FixedArray
      __ or_(rdi, Immediate(kHeapObjectTag));  // add the heap tag
      __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);


      // Continue with JSObject being successfully allocated.
      // rbx: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated objects unused properties.
      // rbx: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(rbx);
    }

    // Allocate the new receiver object using the runtime call.
    // rdi: function (constructor)
    __ bind(&rt_call);
    // Must restore rdi (constructor) before calling runtime.
    __ movq(rdi, Operand(rsp, 0));
    __ push(rdi);
    __ CallRuntime(Runtime::kNewObject, 1);
    __ movq(rbx, rax);  // store result in rbx

    // New object allocated.
    // rbx: newly allocated object
    __ bind(&allocated);
    // Retrieve the function from the stack.
    __ pop(rdi);

    // Retrieve smi-tagged arguments count from the stack.
    __ movq(rax, Operand(rsp, 0));
    __ SmiToInteger32(rax, rax);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ push(rbx);
    __ push(rbx);

    // Setup pointer to last argument.
    __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack (highest index
    // first, so the first argument ends up deepest on the stack).
    Label loop, entry;
    __ movq(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decq(rcx);
    __ j(greater_equal, &loop);

    // Call the function.
    if (is_api_function) {
      __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      ParameterCount expected(0);
      __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
                    CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      ParameterCount actual(rax);
      __ InvokeFunction(rdi, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Restore context from the frame.
    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;
    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(rax, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ movq(rax, Operand(rsp, 0));

    // Restore the arguments count and leave the construct frame.
    __ bind(&exit);
    __ movq(rbx, Operand(rsp, kPointerSize));  // Get arguments count.

    // Leave construct frame (FrameScope destructor emits the teardown).
  }

  // Remove caller arguments from the stack and return.  rbx still holds the
  // smi-tagged argument count; +1 pointer accounts for the receiver.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
  __ ret(0);
}
404
405
// Construct stub variant that decrements the shared function's construction
// countdown (and finalizes the instance size when it hits zero).
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}
409
410
// Generic construct stub: plain function invocation, no construction
// counting.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}
414
415
// Construct stub for API functions: invokes through HandleApiCallConstruct.
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}
419
420
// Shared body of the C++-to-JS entry trampolines.  Moves the C calling
// convention arguments (which differ between the Windows and System V x64
// ABIs) into the registers the JS calling convention expects, copies the
// argument handles onto the stack, and invokes the function either as a
// call or as a construct call.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver, and
    // register rax and rbx holds the argument count and argument array,
    // while rdi holds the function pointer and rsi the context.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : entry (ignored)
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Load the function context into rsi.
    __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ push(rdx);
    __ push(r8);

    // Load the number of arguments and setup pointer to the arguments.
    __ movq(rax, r9);
    // Load the previous frame pointer to access the fifth C argument
    // (argv), which was passed on the caller's stack.
    __ movq(kScratchRegister, Operand(rbp, 0));
    __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movq(rdi, rdx);
#else  // _WIN64
    // GCC parameters in:
    // rdi : entry (ignored)
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    // Move the function out of rsi, which is needed for the context.
    __ movq(rdi, rsi);
    // rdi : function

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the function and receiver and setup the context.
    __ push(rdi);
    __ push(rdx);
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Load the number of arguments and setup pointer to the arguments.
    __ movq(rax, rcx);
    __ movq(rbx, r8);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ]: Internal frame
    // [rsp + kPointerSize]         : function
    // [rsp]                        : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ push(Operand(kScratchRegister, 0));  // dereference handle
    __ addq(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpq(rcx, rax);
    __ j(not_equal, &loop);

    // Invoke the code.
    if (is_construct) {
      // Expects rdi to hold function pointer.
      __ Call(masm->isolate()->builtins()->JSConstructCall(),
              RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(rax);
      // Function must be in rdi.
      __ InvokeFunction(rdi, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }
    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}
534
535
// Entry trampoline for ordinary (non-construct) calls from C++ into JS.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
539
540
// Entry trampoline for construct calls from C++ into JS.
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
544
Iain Merrick75681382010-08-19 15:07:18 +0100545
// Stub installed as the code of not-yet-compiled functions: compiles the
// function (rdi) via Runtime::kLazyCompile, then tail-calls the freshly
// compiled code returned in rax.
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push a copy of the function onto the stack.
    __ push(rdi);
    // Push call kind information (rcx) so it survives the runtime call.
    __ push(rcx);

    __ push(rdi);  // Function is also the parameter to the runtime call.
    __ CallRuntime(Runtime::kLazyCompile, 1);

    // Restore call kind information.
    __ pop(rcx);
    // Restore receiver.
    __ pop(rdi);

    // Tear down internal frame (emitted by the FrameScope destructor).
  }

  // Do a tail-call of the compiled function; rax holds the Code object
  // returned by the runtime call.
  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);
}
571
Ben Murdochb0fe1622011-05-05 13:52:32 +0100572
// Stub used to recompile an already-compiled function (e.g. for
// optimization) via Runtime::kLazyRecompile, then tail-call the new code.
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push a copy of the function onto the stack.
    __ push(rdi);
    // Push call kind information (rcx) so it survives the runtime call.
    __ push(rcx);

    __ push(rdi);  // Function is also the parameter to the runtime call.
    __ CallRuntime(Runtime::kLazyRecompile, 1);

    // Restore call kind information.
    __ pop(rcx);
    // Restore function.
    __ pop(rdi);

    // Tear down internal frame (emitted by the FrameScope destructor).
  }

  // Do a tail-call of the compiled function; rax holds the Code object
  // returned by the runtime call.
  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);
}
598
599
// Shared body of the deoptimization-notification builtins.  Informs the
// runtime of the bailout |type|, then resumes in unoptimized code by
// popping the full-codegen state (and, for TOS_REG, keeping rax as the
// top-of-stack value).
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));

    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
    // Tear down internal frame (emitted by the FrameScope destructor).
  }

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));

  // Switch on the state.
  Label not_no_registers, not_tos_rax;
  __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  // TOS_REG state: reload the saved top-of-stack value into rax.
  __ movq(rax, Operand(rsp, 2 * kPointerSize));
  __ cmpq(rcx, Immediate(FullCodeGenerator::TOS_REG));
  __ j(not_equal, &not_tos_rax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  // Any other state is unexpected.
  __ bind(&not_tos_rax);
  __ Abort("no cases left");
}
631
// Notification builtin for eager deoptimization bailouts.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
635
636
// Notification builtin for lazy deoptimization bailouts.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
640
641
// Notifies the runtime of an on-stack-replacement event.  All registers are
// saved around the runtime call because OSR can happen with live values in
// any of them.
void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  // For now, we are relying on the fact that Runtime::NotifyOSR
  // doesn't do any garbage collection which allows us to save/restore
  // the registers without worrying about which of them contain
  // pointers. This seems a bit fragile.
  __ Pushad();
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyOSR, 0);
  }
  __ Popad();
  __ ret(0);
}
655
656
657void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
658 // Stack Layout:
659 // rsp[0]: Return address
660 // rsp[1]: Argument n
661 // rsp[2]: Argument n-1
662 // ...
663 // rsp[n]: Argument 1
664 // rsp[n+1]: Receiver (function to call)
665 //
666 // rax contains the number of arguments, n, not counting the receiver.
667 //
668 // 1. Make sure we have at least one argument.
669 { Label done;
670 __ testq(rax, rax);
671 __ j(not_zero, &done);
672 __ pop(rbx);
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000673 __ Push(masm->isolate()->factory()->undefined_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +0100674 __ push(rbx);
675 __ incq(rax);
676 __ bind(&done);
677 }
678
679 // 2. Get the function to call (passed as receiver) from the stack, check
680 // if it is a function.
Ben Murdoch589d6972011-11-30 16:04:58 +0000681 Label slow, non_function;
Ben Murdochb0fe1622011-05-05 13:52:32 +0100682 // The function to call is at position n+1 on the stack.
683 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
684 __ JumpIfSmi(rdi, &non_function);
685 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
Ben Murdoch589d6972011-11-30 16:04:58 +0000686 __ j(not_equal, &slow);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100687
688 // 3a. Patch the first argument if necessary when calling a function.
689 Label shift_arguments;
Ben Murdoch589d6972011-11-30 16:04:58 +0000690 __ Set(rdx, 0); // indicate regular JS_FUNCTION
Ben Murdochb0fe1622011-05-05 13:52:32 +0100691 { Label convert_to_object, use_global_receiver, patch_receiver;
692 // Change context eagerly in case we need the global receiver.
693 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
694
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100695 // Do not transform the receiver for strict mode functions.
696 __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
697 __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
698 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
699 __ j(not_equal, &shift_arguments);
700
Ben Murdoch257744e2011-11-30 15:57:28 +0000701 // Do not transform the receiver for natives.
702 // SharedFunctionInfo is already loaded into rbx.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000703 __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset),
704 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
Ben Murdoch257744e2011-11-30 15:57:28 +0000705 __ j(not_zero, &shift_arguments);
706
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100707 // Compute the receiver in non-strict mode.
Ben Murdochb0fe1622011-05-05 13:52:32 +0100708 __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
Ben Murdoch257744e2011-11-30 15:57:28 +0000709 __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100710
711 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
712 __ j(equal, &use_global_receiver);
713 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
714 __ j(equal, &use_global_receiver);
715
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000716 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
717 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
Ben Murdoch257744e2011-11-30 15:57:28 +0000718 __ j(above_equal, &shift_arguments);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100719
720 __ bind(&convert_to_object);
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000721 {
722 // Enter an internal frame in order to preserve argument count.
723 FrameScope scope(masm, StackFrame::INTERNAL);
724 __ Integer32ToSmi(rax, rax);
725 __ push(rax);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100726
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000727 __ push(rbx);
728 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
729 __ movq(rbx, rax);
730 __ Set(rdx, 0); // indicate regular JS_FUNCTION
Ben Murdochb0fe1622011-05-05 13:52:32 +0100731
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000732 __ pop(rax);
733 __ SmiToInteger32(rax, rax);
734 }
735
Ben Murdochb0fe1622011-05-05 13:52:32 +0100736 // Restore the function to rdi.
737 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
Ben Murdoch257744e2011-11-30 15:57:28 +0000738 __ jmp(&patch_receiver, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100739
740 // Use the global receiver object from the called function as the
741 // receiver.
742 __ bind(&use_global_receiver);
743 const int kGlobalIndex =
744 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
745 __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
746 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
747 __ movq(rbx, FieldOperand(rbx, kGlobalIndex));
748 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
749
750 __ bind(&patch_receiver);
751 __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx);
752
753 __ jmp(&shift_arguments);
754 }
755
Ben Murdoch589d6972011-11-30 16:04:58 +0000756 // 3b. Check for function proxy.
757 __ bind(&slow);
758 __ Set(rdx, 1); // indicate function proxy
759 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
760 __ j(equal, &shift_arguments);
761 __ bind(&non_function);
762 __ Set(rdx, 2); // indicate non-function
Ben Murdochb0fe1622011-05-05 13:52:32 +0100763
Ben Murdoch589d6972011-11-30 16:04:58 +0000764 // 3c. Patch the first argument when calling a non-function. The
Ben Murdochb0fe1622011-05-05 13:52:32 +0100765 // CALL_NON_FUNCTION builtin expects the non-function callee as
766 // receiver, so overwrite the first argument which will ultimately
767 // become the receiver.
Ben Murdochb0fe1622011-05-05 13:52:32 +0100768 __ movq(Operand(rsp, rax, times_pointer_size, 0), rdi);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100769
770 // 4. Shift arguments and return address one slot down on the stack
771 // (overwriting the original receiver). Adjust argument count to make
772 // the original first argument the new receiver.
773 __ bind(&shift_arguments);
774 { Label loop;
775 __ movq(rcx, rax);
776 __ bind(&loop);
777 __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
778 __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
779 __ decq(rcx);
780 __ j(not_sign, &loop); // While non-negative (to copy return address).
781 __ pop(rbx); // Discard copy of return address.
782 __ decq(rax); // One fewer argument (first argument is new receiver).
783 }
784
Ben Murdoch589d6972011-11-30 16:04:58 +0000785 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
786 // or a function proxy via CALL_FUNCTION_PROXY.
787 { Label function, non_proxy;
788 __ testq(rdx, rdx);
789 __ j(zero, &function);
Steve Block9fac8402011-05-12 15:51:54 +0100790 __ Set(rbx, 0);
Ben Murdoch257744e2011-11-30 15:57:28 +0000791 __ SetCallKind(rcx, CALL_AS_METHOD);
Ben Murdoch589d6972011-11-30 16:04:58 +0000792 __ cmpq(rdx, Immediate(1));
793 __ j(not_equal, &non_proxy);
794
795 __ pop(rdx); // return address
796 __ push(rdi); // re-add proxy object as additional argument
797 __ push(rdx);
798 __ incq(rax);
799 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
800 __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
801 RelocInfo::CODE_TARGET);
802
803 __ bind(&non_proxy);
804 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
Steve Block44f0eee2011-05-26 01:26:41 +0100805 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
Ben Murdochb0fe1622011-05-05 13:52:32 +0100806 RelocInfo::CODE_TARGET);
807 __ bind(&function);
808 }
809
810 // 5b. Get the code to call from the function and check that the number of
811 // expected arguments matches what we're providing. If so, jump
812 // (tail-call) to the code in register edx without checking arguments.
813 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
814 __ movsxlq(rbx,
815 FieldOperand(rdx,
816 SharedFunctionInfo::kFormalParameterCountOffset));
817 __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +0000818 __ SetCallKind(rcx, CALL_AS_METHOD);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100819 __ cmpq(rax, rbx);
820 __ j(not_equal,
Steve Block44f0eee2011-05-26 01:26:41 +0100821 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
Ben Murdochb0fe1622011-05-05 13:52:32 +0100822 RelocInfo::CODE_TARGET);
823
824 ParameterCount expected(0);
Ben Murdoch257744e2011-11-30 15:57:28 +0000825 __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION,
826 NullCallWrapper(), CALL_AS_METHOD);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100827}
828
829
// Implementation of Function.prototype.apply (x64).
//
// Unrolls the prepared arguments list onto the stack one element at a
// time (each element is read with the keyed-load IC) and then invokes
// the target, which is either a plain JSFunction or a function proxy.
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  // Stack at entry:
  // rsp: return address
  // rsp+8: arguments
  // rsp+16: receiver ("this")
  // rsp+24: function
  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Stack frame:
    // rbp: Old base pointer
    // rbp[1]: return address
    // rbp[2]: function arguments
    // rbp[3]: receiver
    // rbp[4]: function
    static const int kArgumentsOffset = 2 * kPointerSize;
    static const int kReceiverOffset = 3 * kPointerSize;
    static const int kFunctionOffset = 4 * kPointerSize;

    // After APPLY_PREPARE the number of arguments is in rax as a smi; it
    // is used below as the copy-loop limit and for the stack-space check.
    __ push(Operand(rbp, kFunctionOffset));
    __ push(Operand(rbp, kArgumentsOffset));
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
    __ movq(rcx, rsp);
    // Make rcx the space we have left. The stack might already be overflowed
    // here which will cause rcx to become negative.
    __ subq(rcx, kScratchRegister);
    // Make rdx the space we need for the array when it is unrolled onto the
    // stack.
    __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
    // Check if the arguments will overflow the stack.
    __ cmpq(rcx, rdx);
    __ j(greater, &okay);  // Signed comparison.

    // Out of stack space.
    __ push(Operand(rbp, kFunctionOffset));
    __ push(rax);
    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
    __ bind(&okay);
    // End of stack check.

    // Push current index and limit, which thereby occupy the first two
    // expression slots of the internal frame and can be addressed off rbp.
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
    const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
    __ push(rax);  // limit (smi)
    __ push(Immediate(0));  // index (smi zero)

    // Get the receiver.
    __ movq(rbx, Operand(rbp, kReceiverOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ movq(rdi, Operand(rbp, kFunctionOffset));
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_receiver;
    __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &push_receiver);

    // Do not transform the receiver for natives.
    __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
             Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
    __ j(not_equal, &push_receiver);

    // Compute the receiver in non-strict mode: smis and null/undefined are
    // converted; spec objects are used as-is.
    __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    // If given receiver is already a JavaScript object then there's no
    // reason for converting it.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &push_receiver);

    // Convert the receiver to an object.
    __ bind(&call_to_object);
    __ push(rbx);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ movq(rbx, rax);
    __ jmp(&push_receiver, Label::kNear);

    // Use the current global receiver object as the receiver.
    __ bind(&use_global_receiver);
    const int kGlobalOffset =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
    __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    __ bind(&push_receiver);
    __ push(rbx);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ movq(rax, Operand(rbp, kIndexOffset));
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(rdx, Operand(rbp, kArgumentsOffset));  // load arguments

    // Use inline caching to speed up access to arguments.  The IC is
    // called with the arguments object in rdx and the (smi) index in rax;
    // the loaded element comes back in rax and is pushed below.
    Handle<Code> ic =
        masm->isolate()->builtins()->KeyedLoadIC_Initialize();
    __ Call(ic, RelocInfo::CODE_TARGET);
    // It is important that we do not have a test instruction after the
    // call. A test instruction after the call is used to indicate that
    // we have generated an inline version of the keyed load. In this
    // case, we know that we are not generating a test instruction next.

    // Push the nth argument.
    __ push(rax);

    // Update the index on the stack and in register rax.
    __ movq(rax, Operand(rbp, kIndexOffset));
    __ SmiAddConstant(rax, rax, Smi::FromInt(1));
    __ movq(Operand(rbp, kIndexOffset), rax);

    __ bind(&entry);
    __ cmpq(rax, Operand(rbp, kLimitOffset));
    __ j(not_equal, &loop);

    // Invoke the function.
    Label call_proxy;
    ParameterCount actual(rax);
    __ SmiToInteger32(rax, rax);
    __ movq(rdi, Operand(rbp, kFunctionOffset));
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &call_proxy);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION,
                      NullCallWrapper(), CALL_AS_METHOD);

    frame_scope.GenerateLeaveFrame();
    __ ret(3 * kPointerSize);  // remove this, receiver, and arguments

    // Invoke the function proxy.
    __ bind(&call_proxy);
    __ push(rdi);  // add function proxy as last argument
    __ incq(rax);  // account for the extra (proxy) argument just pushed
    __ Set(rbx, 0);  // expected argument count for the adaptor trampoline
    __ SetCallKind(rcx, CALL_AS_METHOD);
    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  // Return for the proxy path; the JSFunction path returned above inside
  // the frame scope.
  __ ret(3 * kPointerSize);  // remove this, receiver, and arguments
}
994
995
// Allocate an empty JSArray. The allocated array is put into the result
// register. If JSArray::kPreallocatedArrayElements is larger than zero an
// elements backing store is allocated with this size and filled with the
// hole values. Otherwise the elements backing store is set to the empty
// FixedArray. Jumps to gc_required if new space is exhausted.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 Label* gc_required) {
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  STATIC_ASSERT(initial_capacity >= 0);

  // Load the initial map from the array function.
  __ movq(scratch1, FieldOperand(array_function,
                                 JSFunction::kPrototypeOrInitialMapOffset));

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.  The capacity is a compile-time constant, so the
  // total size is computed here in C++.
  int size = JSArray::kSize;
  if (initial_capacity > 0) {
    size += FixedArray::SizeFor(initial_capacity);
  }
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  Factory* factory = masm->isolate()->factory();
  __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
  __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
          factory->empty_fixed_array());
  // Field JSArray::kElementsOffset is initialized later.
  __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));

  // If no storage is requested for the elements array just set the empty
  // fixed array.
  if (initial_capacity == 0) {
    __ Move(FieldOperand(result, JSArray::kElementsOffset),
            factory->empty_fixed_array());
    return;
  }

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.  The FixedArray follows the JSArray header directly.
  // result: JSObject
  // scratch2: start of next object
  __ lea(scratch1, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), scratch1);

  // Initialize the FixedArray and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array
  // scratch2: start of next object
  __ Move(FieldOperand(scratch1, HeapObject::kMapOffset),
          factory->fixed_array_map());
  __ Move(FieldOperand(scratch1, FixedArray::kLengthOffset),
          Smi::FromInt(initial_capacity));

  // Fill the FixedArray with the hole value. Inline the code if short.
  // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
  static const int kLoopUnfoldLimit = 4;
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  if (initial_capacity <= kLoopUnfoldLimit) {
    // Use a scratch register here to have only one reloc info when unfolding
    // the loop.
    for (int i = 0; i < initial_capacity; i++) {
      __ movq(FieldOperand(scratch1,
                           FixedArray::kHeaderSize + i * kPointerSize),
              scratch3);
    }
  } else {
    // Fill the array backwards: the decq before the first store makes the
    // loop cover indices initial_capacity-1 down to 0, exiting when the
    // counter goes negative (sign flag set).
    Label loop, entry;
    __ movq(scratch2, Immediate(initial_capacity));
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(FieldOperand(scratch1,
                         scratch2,
                         times_pointer_size,
                         FixedArray::kHeaderSize),
            scratch3);
    __ bind(&entry);
    __ decq(scratch2);
    __ j(not_sign, &loop);
  }
}
1091
1092
// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register and beginning and end of the FixedArray elements
// storage is put into registers elements_array and elements_array_end (see
// below for when that is not the case). If the parameter fill_with_holes is
// true the allocated elements backing store is filled with the hole values
// otherwise it is left uninitialized. When the backing store is filled the
// register elements_array is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi, cannot be 0.
                            Register result,
                            Register elements_array,
                            Register elements_array_end,
                            Register scratch,
                            bool fill_with_hole,
                            Label* gc_required) {
  // Load the initial map from the array function.  Note the register reuse:
  // elements_array temporarily holds the map until it is stored below.
  __ movq(elements_array,
          FieldOperand(array_function,
                       JSFunction::kPrototypeOrInitialMapOffset));

  if (FLAG_debug_code) {  // Assert that array size is not zero.
    __ testq(array_size, array_size);
    __ Assert(not_zero, "array size is unexpectedly 0");
  }

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested elements.  The variable part of the size (array_size elements)
  // is passed as a scaled index register to AllocateInNewSpace.
  SmiIndex index =
      masm->SmiToIndex(kScratchRegister, array_size, kPointerSizeLog2);
  __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
                        index.scale,
                        index.reg,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array: initial map
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  Factory* factory = masm->isolate()->factory();
  __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
  __ Move(elements_array, factory->empty_fixed_array());
  __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
  // Field JSArray::kElementsOffset is initialized later.
  __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ lea(elements_array, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), elements_array);

  // Initialize the fixed array. FixedArray length is stored as a smi.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
          factory->fixed_array_map());
  // For non-empty JSArrays the length of the FixedArray and the JSArray is the
  // same.
  __ movq(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  if (fill_with_hole) {
    // Forward fill from the first element slot up to the end of the
    // allocation; elements_array is clobbered as the running pointer.
    Label loop, entry;
    __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
    __ lea(elements_array, Operand(elements_array,
                                   FixedArray::kHeaderSize - kHeapObjectTag));
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(elements_array, 0), scratch);
    __ addq(elements_array, Immediate(kPointerSize));
    __ bind(&entry);
    __ cmpq(elements_array, elements_array_end);
    __ j(below, &loop);
  }
}
1184
1185
// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code the runtime is called. This
// function assumes the following state:
//   rdi: constructor (built-in Array function)
//   rax: argc
//   rsp[0]: return address
//   rsp[8]: last argument
// This function is used for both construct and normal calls of Array. The only
// difference between handling a construct call and a normal call is that for a
// construct call the constructor function in rdi needs to be preserved for
// entering the generic code. In both cases argc in rax needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// a construct call and a normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label *call_generic_code) {
  Label argc_one_or_more, argc_two_or_more, empty_array, not_empty_array;

  // Check for array construction with zero arguments.
  __ testq(rax, rax);
  __ j(not_zero, &argc_one_or_more);

  __ bind(&empty_array);
  // Handle construction of an empty array.
  AllocateEmptyJSArray(masm,
                       rdi,
                       rbx,
                       rcx,
                       rdx,
                       r8,
                       call_generic_code);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->array_function_native(), 1);
  __ movq(rax, rbx);
  __ ret(kPointerSize);

  // Check for one argument. Bail out if argument is not smi or if it is
  // negative.
  __ bind(&argc_one_or_more);
  __ cmpq(rax, Immediate(1));
  __ j(not_equal, &argc_two_or_more);
  __ movq(rdx, Operand(rsp, kPointerSize));  // Get the argument from the stack.

  // new Array(0) produces an empty array: drop the single argument from
  // the stack (preserving the return address in r8) and reuse the
  // empty-array path above with argc forced to zero.
  __ SmiTest(rdx);
  __ j(not_zero, &not_empty_array);
  __ pop(r8);  // Adjust stack.
  __ Drop(1);
  __ push(r8);
  __ movq(rax, Immediate(0));  // Treat this as a call with argc of zero.
  __ jmp(&empty_array);

  __ bind(&not_empty_array);
  __ JumpUnlessNonNegativeSmi(rdx, call_generic_code);

  // Handle construction of an empty array of a certain size. Bail out if size
  // is too large to actually allocate an elements array.
  __ SmiCompare(rdx, Smi::FromInt(JSObject::kInitialMaxFastElementArray));
  __ j(greater_equal, call_generic_code);

  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // esp[0]: return address
  // esp[8]: argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  true,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1);
  __ movq(rax, rbx);
  __ ret(2 * kPointerSize);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ movq(rdx, rax);
  __ Integer32ToSmi(rdx, rdx);  // Convert argc to a smi.
  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // esp[0] : return address
  // esp[8] : last argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  false,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1);

  // rax: argc
  // rbx: JSArray
  // rcx: elements_array
  // r8: elements_array_end (untagged)
  // esp[0]: return address
  // esp[8]: last argument

  // Location of the last argument
  __ lea(r9, Operand(rsp, kPointerSize));

  // Location of the first array element (Parameter fill_with_holes to
  // AllocateJSArray is false, so the FixedArray is returned in rcx).
  __ lea(rdx, Operand(rcx, FixedArray::kHeaderSize - kHeapObjectTag));

  // Copy the arguments (which lie on the stack in reverse order) into the
  // elements array, counting rcx down from argc-1 to 0.
  // rax: argc
  // rbx: JSArray
  // rdx: location of the first array element
  // r9: location of the last argument
  // esp[0]: return address
  // esp[8]: last argument
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(r9, rcx, times_pointer_size, 0));
  __ movq(Operand(rdx, 0), kScratchRegister);
  __ addq(rdx, Immediate(kPointerSize));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Remove caller arguments from the stack and return.
  // rax: argc
  // rbx: JSArray
  // esp[0]: return address
  // esp[8]: last argument
  __ pop(rcx);
  __ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
  __ push(rcx);
  __ movq(rax, rbx);
  __ ret(0);
}
1325
1326
// Entry point for the Array function called as a normal function
// (i.e. `Array(...)`).  Tries the fast native path first and falls back
// to the generic array code if it bails out.
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code in case the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);
  Handle<Code> array_code =
      masm->isolate()->builtins()->ArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}
1359
1360
// Entry point for the Array function called as a constructor
// (i.e. `new Array(...)`).  Unlike Generate_ArrayCode the constructor is
// already in rdi.  Falls back to the generic construct stub on bailout.
void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argc
  //  -- rdi : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin and internal
    // Array functions which always have a map.
    // Initial map for the builtin Array function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Handle<Code> generic_construct_stub =
      masm->isolate()->builtins()->JSConstructStubGeneric();
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
1393
1394
// Construct stub for the String function; currently just delegates to the
// generic JS construct stub.
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // TODO(849): implement custom construct stub.
  // Generate a copy of the generic stub for now.
  Generate_JSConstructStubGeneric(masm);
}
1400
1401
// Build an arguments-adaptor frame: saved rbp, the ARGUMENTS_ADAPTOR
// context sentinel, the function (rdi), and the argument count as a smi.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(rbp);
  __ movq(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ push(r8);
}
1418
1419
// Tears down an arguments adaptor frame built by EnterArgumentsAdaptorFrame
// and removes the caller's actual arguments plus the receiver from the
// stack, leaving the return address on top.  Clobbers rbx and rcx.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movq(rsp, rbp);
  __ pop(rbp);

  // Remove caller arguments from the stack.
  __ pop(rcx);  // Temporarily hold the return address.
  // Scale the Smi-tagged count into a byte index (kPointerSizeLog2).
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  // Drop the arguments plus one extra slot for the receiver.
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);  // Put the return address back on top.
}
1434
1435
// Adapts a call whose actual argument count differs from the callee's
// expected count.  Builds an adaptor frame, re-pushes the receiver and
// arguments so that exactly rbx arguments are present (truncating extras
// or padding with undefined), then calls the code entry in rdx.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rcx : call kind information
  //  -- rdx : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label enough, too_few;
  __ cmpq(rax, rbx);
  __ j(less, &too_few);
  // Functions marked with the "don't adapt" sentinel are called as-is.
  __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    // rax starts at the highest argument slot (the receiver) and walks
    // down one pointer per iteration; r8 counts copied slots.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(r8);
    __ push(Operand(rax, 0));
    __ subq(rax, Immediate(kPointerSize));
    __ cmpq(r8, rbx);  // Stop after the receiver plus rbx arguments.
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.  rdi is used as the copy
    // cursor here (its original value was saved by the frame setup).
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(r8);
    __ push(Operand(rdi, 0));
    __ subq(rdi, Immediate(kPointerSize));
    __ cmpq(r8, rax);  // Only rax actual arguments are available.
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incq(r8);
    __ push(kScratchRegister);
    __ cmpq(r8, rbx);  // Pad until rbx slots are on the stack.
    __ j(less, &fill);

    // Restore function pointer, clobbered while copying above.
    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Dont adapt arguments.
  // -------------------------------------------
  // Tail-jump straight to the callee; no adaptor frame is needed.
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);
}
1517
1518
// Entry point for on-stack replacement (OSR): decides whether to replace
// the currently running unoptimized frame with optimized code.  Called
// from a loop's stack-guard check in unoptimized code; the call site is
// followed by a test(rax, depth) instruction encoding the loop depth.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Get the loop depth of the stack guard check. This is recorded in
  // a test(rax, depth) instruction right after the call.
  Label stack_check;
  __ movq(rbx, Operand(rsp, 0));  // return address
  // The depth byte is the test instruction's immediate, 1 byte past the
  // return address.
  __ movzxbq(rbx, Operand(rbx, 1));  // depth

  // Get the loop nesting level at which we allow OSR from the
  // unoptimized code and check if we want to do OSR yet. If not we
  // should perform a stack guard check so we can get interrupts while
  // waiting for on-stack replacement.
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
  __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset));
  __ j(greater, &stack_check);

  // Pass the function to optimize as the argument to the on-stack
  // replacement runtime function.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the result was -1 it means that we couldn't optimize the
  // function. Just return and continue in the unoptimized version.
  Label skip;
  __ SmiCompare(rax, Smi::FromInt(-1));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  // If we decide not to perform on-stack replacement we perform a
  // stack guard check to enable interrupts.
  __ bind(&stack_check);
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok, Label::kNear);

  // The stack-check stub returns to our caller, so control should never
  // come back here after the tail call.
  StackCheckStub stub;
  __ TailCallStub(&stub);
  if (FLAG_debug_code) {
    __ Abort("Unreachable code: returned from tail call.");
  }
  __ bind(&ok);
  __ ret(0);

  __ bind(&skip);
  // Untag the AST id and push it on the stack.
  __ SmiToInteger32(rax, rax);
  __ push(rax);

  // Generate the code for doing the frame-to-frame translation using
  // the deoptimizer infrastructure.
  Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
  generator.Generate();
}
1576
1577
1578#undef __
1579
Steve Blocka7e24c12009-10-30 11:49:00 +00001580} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01001581
1582#endif // V8_TARGET_ARCH_X64