// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "codegen.h"
#include "deoptimizer.h"
#include "full-codegen.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : called function (only guaranteed when
  //                           extra_args requires it)
  //  -- rsi                 : context
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument (argc == rax)
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ pop(kScratchRegister);  // Save return address.
    __ push(rdi);
    __ push(kScratchRegister);  // Restore return address.
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }
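  // The stack now holds, from the top: the return address, then (when
  // NEEDS_CALLED_FUNCTION) the called function pushed as an extra argument,
  // and then the original arguments.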

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addq(rax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
}


void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  // -----------------------------------

  Label non_function_call;
  // Check that the function is not a smi.
  __ JumpIfSmi(rdi, &non_function_call);
  // Check that the function is a JSFunction.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &non_function_call);

  // Jump to the function-specific construct stub.
  __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset));
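  // rbx now holds the construct stub Code object; the FieldOperand in the
  // lea below folds in the -kHeapObjectTag adjustment, so rbx ends up
  // pointing at the stub's first instruction.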
  __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize));
  __ jmp(rbx);

  // rdi: called object
  // rax: number of arguments
  __ bind(&non_function_call);
  // Set the expected number of arguments to zero (not changing rax).
  __ Set(rbx, 0);
  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ SetCallKind(rcx, CALL_AS_METHOD);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  // Enter a construct frame.
  __ EnterConstructFrame();

  // Store a smi-tagged arguments count on the stack.
  __ Integer32ToSmi(rax, rax);
  __ push(rax);

  // Push the function to invoke on the stack.
  __ push(rdi);

  // Try to allocate the object without transitioning into C code. If any of
  // the preconditions is not met, the code bails out to the runtime call.
  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;

#ifdef ENABLE_DEBUGGER_SUPPORT
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(masm->isolate());
    __ movq(kScratchRegister, debug_step_in_fp);
    __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
    __ j(not_equal, &rt_call);
#endif

    // Verified that the constructor is a JSFunction.
    // Load the initial map and verify that it is in fact a map.
    // rdi: constructor
    __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    __ JumpIfSmi(rax, &rt_call);
    // rdi: constructor
    // rax: initial map (if proven valid below)
    __ CmpObjectType(rax, MAP_TYPE, rbx);
    __ j(not_equal, &rt_call);

    // Check that the constructor is not constructing a JSFunction (see
    // comments in Runtime_NewObject in runtime.cc); in that case the initial
    // map's instance type would be JS_FUNCTION_TYPE.
    // rdi: constructor
    // rax: initial map
    __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
    __ j(equal, &rt_call);

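    // The construction countdown implements in-object slack tracking: the
    // object is allocated with a generous instance size, and once the
    // countdown stored on the SharedFunctionInfo reaches zero,
    // Runtime::kFinalizeInstanceSize shrinks the instance size to what is
    // actually used.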
    if (count_constructions) {
      Label allocate;
      // Decrease generous allocation count.
      __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      __ decb(FieldOperand(rcx, SharedFunctionInfo::kConstructionCountOffset));
      __ j(not_zero, &allocate);

      __ push(rax);
      __ push(rdi);

      __ push(rdi);  // constructor
      // The call will replace the stub, so the countdown is only done once.
      __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

      __ pop(rdi);
      __ pop(rax);

      __ bind(&allocate);
    }

    // Now allocate the JSObject on the heap.
    __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
    __ shl(rdi, Immediate(kPointerSizeLog2));
    // rdi: size of new object
    __ AllocateInNewSpace(rdi,
                          rbx,
                          rdi,
                          no_reg,
                          &rt_call,
                          NO_ALLOCATION_FLAGS);
    // Allocated the JSObject, now initialize the fields.
    // rax: initial map
    // rbx: JSObject (not HeapObject tagged - the actual address).
    // rdi: start of next object
    __ movq(Operand(rbx, JSObject::kMapOffset), rax);
    __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
    __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
    __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
    // Set extra fields in the newly allocated object.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    { Label loop, entry;
      // Use one-pointer fillers when counting constructions, so that the
      // object can be truncated later; otherwise fill with undefined.
      if (count_constructions) {
        __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
      } else {
        __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      }
      __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ jmp(&entry);
      __ bind(&loop);
      __ movq(Operand(rcx, 0), rdx);
      __ addq(rcx, Immediate(kPointerSize));
      __ bind(&entry);
      __ cmpq(rcx, rdi);
      __ j(less, &loop);
    }

    // Add the object tag to make the JSObject real, so that we can continue
    // and jump into the continuation code at any time from now on. Any
    // failures need to undo the allocation, so that the heap is in a
    // consistent state and verifiable.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    __ or_(rbx, Immediate(kHeapObjectTag));

    // Check if a non-empty properties array is needed.
    // Allocate and initialize a FixedArray if it is.
    // rax: initial map
    // rbx: JSObject
    // rdi: start of next object
    // Calculate the total number of properties described by the map.
    __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
    __ movzxbq(rcx, FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
    __ addq(rdx, rcx);
    // Calculate unused properties past the end of the in-object properties.
    __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
    __ subq(rdx, rcx);
    // Done if no extra properties are to be allocated.
    __ j(zero, &allocated);
    __ Assert(positive, "Property allocation count failed.");

    // Scale the number of elements by pointer size and add the header for
    // FixedArrays to the start of the next object calculation from above.
    // rbx: JSObject
    // rdi: start of next object (will be start of FixedArray)
    // rdx: number of elements in properties array
    __ AllocateInNewSpace(FixedArray::kHeaderSize,
                          times_pointer_size,
                          rdx,
                          rdi,
                          rax,
                          no_reg,
                          &undo_allocation,
                          RESULT_CONTAINS_TOP);

    // Initialize the FixedArray.
    // rbx: JSObject
    // rdi: FixedArray
    // rdx: number of elements
    // rax: start of next object
    __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
    __ movq(Operand(rdi, HeapObject::kMapOffset), rcx);  // set up the map
    __ Integer32ToSmi(rdx, rdx);
    __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx);  // and length

    // Initialize the fields to undefined.
    // rbx: JSObject
    // rdi: FixedArray
    // rax: start of next object
    // rdx: number of elements
    { Label loop, entry;
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
      __ jmp(&entry);
      __ bind(&loop);
      __ movq(Operand(rcx, 0), rdx);
      __ addq(rcx, Immediate(kPointerSize));
      __ bind(&entry);
      __ cmpq(rcx, rax);
      __ j(below, &loop);
    }

    // Store the initialized FixedArray into the properties field of
    // the JSObject.
    // rbx: JSObject
    // rdi: FixedArray
    __ or_(rdi, Immediate(kHeapObjectTag));  // add the heap tag
    __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);


    // Continue with JSObject being successfully allocated.
    // rbx: JSObject
    __ jmp(&allocated);

    // Undo the setting of the new top so that the heap is verifiable. For
    // example, the map's unused properties potentially do not match the
    // allocated object's unused properties.
    // rbx: JSObject (previous new top)
    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(rbx);
  }

  // Allocate the new receiver object using the runtime call.
  // rdi: function (constructor)
  __ bind(&rt_call);
  // Must restore rdi (constructor) before calling runtime.
  __ movq(rdi, Operand(rsp, 0));
  __ push(rdi);
  __ CallRuntime(Runtime::kNewObject, 1);
  __ movq(rbx, rax);  // store result in rbx

  // New object allocated.
  // rbx: newly allocated object
  __ bind(&allocated);
  // Retrieve the function from the stack.
  __ pop(rdi);

  // Retrieve smi-tagged arguments count from the stack.
  __ movq(rax, Operand(rsp, 0));
  __ SmiToInteger32(rax, rax);

  // Push the allocated receiver to the stack. We need two copies
  // because we may have to return the original one and the calling
  // conventions dictate that the called function pops the receiver.
  __ push(rbx);
  __ push(rbx);

  // Set up the pointer to the last argument.
  __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

  // Copy arguments and receiver to the expression stack.
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ push(Operand(rbx, rcx, times_pointer_size, 0));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Call the function.
  if (is_api_function) {
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
    Handle<Code> code =
        masm->isolate()->builtins()->HandleApiCallConstruct();
    ParameterCount expected(0);
    __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
                  CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
  } else {
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION,
                      NullCallWrapper(), CALL_AS_METHOD);
  }

  // Restore context from the frame.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

  // If the result is an object (in the ECMA sense), we should get rid
  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
  // on page 74.
  Label use_receiver, exit;
  // If the result is a smi, it is *not* an object in the ECMA sense.
  __ JumpIfSmi(rax, &use_receiver);

  // If the type of the result (stored in its map) is less than
  // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
  __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(above_equal, &exit);

  // Throw away the result of the constructor invocation and use the
  // on-stack receiver as the result.
  __ bind(&use_receiver);
  __ movq(rax, Operand(rsp, 0));

  // Restore the arguments count and leave the construct frame.
  __ bind(&exit);
  __ movq(rbx, Operand(rsp, kPointerSize));  // get arguments count
  __ LeaveConstructFrame();

  // Remove caller arguments from the stack and return.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
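  // The extra 1 * kPointerSize in the lea above also drops the receiver
  // pushed by the caller, since rbx only counts the arguments.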
  __ push(rcx);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
  __ ret(0);
}


void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Platform specific argument handling. After this, the stack contains
  // an internal frame and the pushed function and receiver, and
  // registers rax and rbx hold the argument count and argument array,
  // while rdi holds the function pointer and rsi the context.
#ifdef _WIN64
  // MSVC parameters in:
  // rcx : entry (ignored)
  // rdx : function
  // r8 : receiver
  // r9 : argc
  // [rsp+0x20] : argv
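  // (r9 is the last register parameter in the MS x64 calling convention, so
  // the fifth parameter, argv, arrives on the stack above the 32-byte shadow
  // space reserved for the register parameters.)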

  // Clear the context before we push it when entering the JS frame.
  __ Set(rsi, 0);
  __ EnterInternalFrame();

  // Load the function context into rsi.
  __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

  // Push the function and the receiver onto the stack.
  __ push(rdx);
  __ push(r8);

  // Load the number of arguments and set up the pointer to the arguments.
  __ movq(rax, r9);
  // Load the previous frame pointer to access C arguments on the stack.
  __ movq(kScratchRegister, Operand(rbp, 0));
  __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
  // Load the function pointer into rdi.
  __ movq(rdi, rdx);
#else  // _WIN64
  // GCC parameters in:
  // rdi : entry (ignored)
  // rsi : function
  // rdx : receiver
  // rcx : argc
  // r8 : argv

  __ movq(rdi, rsi);
  // rdi : function

  // Clear the context before we push it when entering the JS frame.
  __ Set(rsi, 0);
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push the function and receiver and set up the context.
  __ push(rdi);
  __ push(rdx);
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Load the number of arguments and set up the pointer to the arguments.
  __ movq(rax, rcx);
  __ movq(rbx, r8);
#endif  // _WIN64

  // Current stack contents:
  // [rsp + 2 * kPointerSize ... ] : Internal frame
  // [rsp + kPointerSize]          : function
  // [rsp]                         : receiver
  // Current register contents:
  // rax : argc
  // rbx : argv
  // rsi : context
  // rdi : function

  // Copy arguments to the stack in a loop.
  // Register rbx points to an array of pointers to handle locations.
  // Push the values of these handles.
  Label loop, entry;
  __ Set(rcx, 0);  // Set loop variable to 0.
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
  __ push(Operand(kScratchRegister, 0));  // dereference handle
  __ addq(rcx, Immediate(1));
  __ bind(&entry);
  __ cmpq(rcx, rax);
  __ j(not_equal, &loop);

  // Invoke the code.
  if (is_construct) {
    // Expects rdi to hold the function pointer.
    __ Call(masm->isolate()->builtins()->JSConstructCall(),
            RelocInfo::CODE_TARGET);
  } else {
    ParameterCount actual(rax);
    // Function must be in rdi.
    __ InvokeFunction(rdi, actual, CALL_FUNCTION,
                      NullCallWrapper(), CALL_AS_METHOD);
  }

  // Exit the JS frame. Notice that this also removes the empty
  // context and the function left on the stack by the code
  // invocation.
  __ LeaveInternalFrame();
  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // remove receiver
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push a copy of the function onto the stack.
  __ push(rdi);
  // Push call kind information.
  __ push(rcx);

  __ push(rdi);  // Function is also the parameter to the runtime call.
  __ CallRuntime(Runtime::kLazyCompile, 1);
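  // The runtime call returns the freshly compiled Code object in rax.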

  // Restore call kind information.
  __ pop(rcx);
  // Restore receiver.
  __ pop(rdi);

  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);
}


void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push a copy of the function onto the stack.
  __ push(rdi);
  // Push call kind information.
  __ push(rcx);

  __ push(rdi);  // Function is also the parameter to the runtime call.
  __ CallRuntime(Runtime::kLazyRecompile, 1);

  // Restore call kind information.
  __ pop(rcx);
  // Restore function.
  __ pop(rdi);

  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Pass the deoptimization type to the runtime system.
  __ Push(Smi::FromInt(static_cast<int>(type)));

  __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));

  // Switch on the state.
  Label not_no_registers, not_tos_rax;
  __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  __ movq(rax, Operand(rsp, 2 * kPointerSize));
  __ cmpq(rcx, Immediate(FullCodeGenerator::TOS_REG));
  __ j(not_equal, &not_tos_rax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  __ Abort("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  // For now, we are relying on the fact that Runtime::NotifyOSR
  // doesn't do any garbage collection which allows us to save/restore
  // the registers without worrying about which of them contain
  // pointers. This seems a bit fragile.
  __ Pushad();
  __ EnterInternalFrame();
  __ CallRuntime(Runtime::kNotifyOSR, 0);
  __ LeaveInternalFrame();
  __ Popad();
  __ ret(0);
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Stack Layout:
  //   rsp[0]:   Return address
  //   rsp[1]:   Argument n
  //   rsp[2]:   Argument n-1
  //   ...
  //   rsp[n]:   Argument 1
  //   rsp[n+1]: Receiver (function to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
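  // This builtin backs Function.prototype.call: for example, f.call(obj, 1, 2)
  // arrives here with rax == 3, f in the receiver slot, and obj, 1, 2 as the
  // three stack arguments; step 4 below shifts obj into the receiver position.
  //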
  // 1. Make sure we have at least one argument.
  { Label done;
    __ testq(rax, rax);
    __ j(not_zero, &done);
    __ pop(rbx);
    __ Push(FACTORY->undefined_value());
    __ push(rbx);
    __ incq(rax);
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label non_function;
  // The function to call is at position n+1 on the stack.
  __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
  __ JumpIfSmi(rdi, &non_function);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &non_function);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &shift_arguments);

    // Do not transform the receiver for natives.
    // SharedFunctionInfo is already loaded into rbx.
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kES5NativeByteOffset),
             Immediate(1 << SharedFunctionInfo::kES5NativeBitWithinByte));
    __ j(not_zero, &shift_arguments);

    // Compute the receiver in non-strict mode.
    __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
    __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);

    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
    STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
    __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
    __ j(above_equal, &shift_arguments);

    __ bind(&convert_to_object);
    __ EnterInternalFrame();  // In order to preserve argument count.
    __ Integer32ToSmi(rax, rax);
    __ push(rax);

    __ push(rbx);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ movq(rbx, rax);

    __ pop(rax);
    __ SmiToInteger32(rax, rax);
    __ LeaveInternalFrame();
    // Restore the function to rdi.
    __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
    __ jmp(&patch_receiver, Label::kNear);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
    __ movq(rbx, FieldOperand(rbx, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx);

    __ jmp(&shift_arguments);
  }


  // 3b. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  __ bind(&non_function);
  __ movq(Operand(rsp, rax, times_pointer_size, 0), rdi);
  __ Set(rdi, 0);

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  __ bind(&shift_arguments);
  { Label loop;
    __ movq(rcx, rax);
    __ bind(&loop);
    __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
    __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
    __ decq(rcx);
    __ j(not_sign, &loop);  // While non-negative (to copy return address).
    __ pop(rbx);  // Discard copy of return address.
    __ decq(rax);  // One fewer argument (first argument is new receiver).
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin.
  { Label function;
    __ testq(rdi, rdi);
    __ j(not_zero, &function);
    __ Set(rbx, 0);
    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    __ SetCallKind(rcx, CALL_AS_METHOD);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register rdx without checking arguments.
  __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movsxlq(rbx,
             FieldOperand(rdx,
                          SharedFunctionInfo::kFormalParameterCountOffset));
  __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ SetCallKind(rcx, CALL_AS_METHOD);
  __ cmpq(rax, rbx);
  __ j(not_equal,
       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
       RelocInfo::CODE_TARGET);

  ParameterCount expected(0);
  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION,
                NullCallWrapper(), CALL_AS_METHOD);
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  // Stack at entry:
  //   rsp:    return address
  //   rsp+8:  arguments
  //   rsp+16: receiver ("this")
  //   rsp+24: function
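  // This builtin backs Function.prototype.apply: a call such as
  // f.apply(obj, args) arrives here with the function, the receiver, and the
  // arguments object in the slots described above.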
  __ EnterInternalFrame();
  // Stack frame:
  //   rbp:    Old base pointer
  //   rbp[1]: return address
  //   rbp[2]: function arguments
  //   rbp[3]: receiver
  //   rbp[4]: function
  static const int kArgumentsOffset = 2 * kPointerSize;
  static const int kReceiverOffset = 3 * kPointerSize;
  static const int kFunctionOffset = 4 * kPointerSize;
  __ push(Operand(rbp, kFunctionOffset));
  __ push(Operand(rbp, kArgumentsOffset));
  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movq(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subq(rcx, kScratchRegister);
  // Make rdx the space we need for the array when it is unrolled onto the
  // stack.
  __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
  // Check if the arguments will overflow the stack.
  __ cmpq(rcx, rdx);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ push(Operand(rbp, kFunctionOffset));
  __ push(rax);
  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
  __ bind(&okay);
  // End of stack check.

  // Push current index and limit.
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
  const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
  __ push(rax);  // limit
  __ push(Immediate(0));  // index

  // Change context eagerly to get the right global object if
  // necessary.
  __ movq(rdi, Operand(rbp, kFunctionOffset));
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Compute the receiver.
  Label call_to_object, use_global_receiver, push_receiver;
  __ movq(rbx, Operand(rbp, kReceiverOffset));

  // Do not transform the receiver for strict mode functions.
  __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
           Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
  __ j(not_equal, &push_receiver);

  // Do not transform the receiver for natives.
  __ testb(FieldOperand(rdx, SharedFunctionInfo::kES5NativeByteOffset),
           Immediate(1 << SharedFunctionInfo::kES5NativeBitWithinByte));
  __ j(not_zero, &push_receiver);

  // Compute the receiver in non-strict mode.
  __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
  __ j(equal, &use_global_receiver);
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &use_global_receiver);

  // If the given receiver is already a JavaScript object then there's no
  // reason for converting it.
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
  STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(above_equal, &push_receiver);

  // Convert the receiver to an object.
  __ bind(&call_to_object);
  __ push(rbx);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ movq(rbx, rax);
  __ jmp(&push_receiver, Label::kNear);

  // Use the current global receiver object as the receiver.
  __ bind(&use_global_receiver);
  const int kGlobalOffset =
      Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
  __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

  // Push the receiver.
  __ bind(&push_receiver);
  __ push(rbx);

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ movq(rax, Operand(rbp, kIndexOffset));
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(rdx, Operand(rbp, kArgumentsOffset));  // load arguments

  // Use inline caching to speed up access to arguments.
  Handle<Code> ic =
      masm->isolate()->builtins()->KeyedLoadIC_Initialize();
  __ Call(ic, RelocInfo::CODE_TARGET);
  // It is important that we do not have a test instruction after the
  // call. A test instruction after the call is used to indicate that
  // we have generated an inline version of the keyed load. In this
  // case, we know that we are not generating a test instruction next.

  // Push the nth argument.
  __ push(rax);

  // Update the index on the stack and in register rax.
  __ movq(rax, Operand(rbp, kIndexOffset));
  __ SmiAddConstant(rax, rax, Smi::FromInt(1));
  __ movq(Operand(rbp, kIndexOffset), rax);

  __ bind(&entry);
  __ cmpq(rax, Operand(rbp, kLimitOffset));
  __ j(not_equal, &loop);

  // Invoke the function.
  ParameterCount actual(rax);
  __ SmiToInteger32(rax, rax);
  __ movq(rdi, Operand(rbp, kFunctionOffset));
  __ InvokeFunction(rdi, actual, CALL_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);

  __ LeaveInternalFrame();
  __ ret(3 * kPointerSize);  // remove function, receiver, and arguments
}


// Number of empty elements to allocate for an empty array.
static const int kPreallocatedArrayElements = 4;


// Allocate an empty JSArray. The allocated array is put into the result
// register. If the parameter initial_capacity is larger than zero an elements
// backing store is allocated with this size and filled with the hole values.
// Otherwise the elements backing store is set to the empty FixedArray.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 int initial_capacity,
                                 Label* gc_required) {
  ASSERT(initial_capacity >= 0);

  // Load the initial map from the array function.
  __ movq(scratch1, FieldOperand(array_function,
                                 JSFunction::kPrototypeOrInitialMapOffset));

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.
  int size = JSArray::kSize;
  if (initial_capacity > 0) {
    size += FixedArray::SizeFor(initial_capacity);
  }
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
  __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
          FACTORY->empty_fixed_array());
  // Field JSArray::kElementsOffset is initialized later.
  __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));

  // If no storage is requested for the elements array just set the empty
  // fixed array.
  if (initial_capacity == 0) {
    __ Move(FieldOperand(result, JSArray::kElementsOffset),
            FACTORY->empty_fixed_array());
    return;
  }

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // scratch2: start of next object
  __ lea(scratch1, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), scratch1);

  // Initialize the FixedArray and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array
  // scratch2: start of next object
  __ Move(FieldOperand(scratch1, HeapObject::kMapOffset),
          FACTORY->fixed_array_map());
  __ Move(FieldOperand(scratch1, FixedArray::kLengthOffset),
          Smi::FromInt(initial_capacity));

  // Fill the FixedArray with the hole value. Inline the code if short.
  // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
  static const int kLoopUnfoldLimit = 4;
  ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
  __ Move(scratch3, FACTORY->the_hole_value());
  if (initial_capacity <= kLoopUnfoldLimit) {
    // Use a scratch register here to have only one reloc info when unfolding
    // the loop.
    for (int i = 0; i < initial_capacity; i++) {
      __ movq(FieldOperand(scratch1,
                           FixedArray::kHeaderSize + i * kPointerSize),
              scratch3);
    }
  } else {
    Label loop, entry;
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(scratch1, 0), scratch3);
    __ addq(scratch1, Immediate(kPointerSize));
    __ bind(&entry);
    __ cmpq(scratch1, scratch2);
    __ j(below, &loop);
  }
}


// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register, and the beginning and end of the FixedArray
// elements storage are put into registers elements_array and
// elements_array_end (see below for when that is not the case). If the
// parameter fill_with_holes is true the allocated elements backing store is
// filled with the hole values, otherwise it is left uninitialized. When the
// backing store is filled the register elements_array is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi.
                            Register result,
                            Register elements_array,
                            Register elements_array_end,
                            Register scratch,
                            bool fill_with_hole,
                            Label* gc_required) {
  Label not_empty, allocated;

  // Load the initial map from the array function.
  __ movq(elements_array,
          FieldOperand(array_function,
                       JSFunction::kPrototypeOrInitialMapOffset));

  // Check whether an empty sized array is requested.
  __ testq(array_size, array_size);
  __ j(not_zero, &not_empty);

  // If an empty array is requested allocate a small elements array anyway.
  // This keeps the code below free of special casing for the empty array.
  int size = JSArray::kSize + FixedArray::SizeFor(kPreallocatedArrayElements);
  __ AllocateInNewSpace(size,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);
  __ jmp(&allocated);

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested elements.
  __ bind(&not_empty);
  SmiIndex index =
      masm->SmiToIndex(kScratchRegister, array_size, kPointerSizeLog2);
  __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
                        index.scale,
                        index.reg,
                        result,
                        elements_array_end,
                        scratch,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array: initial map
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ bind(&allocated);
  __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
  __ Move(elements_array, FACTORY->empty_fixed_array());
  __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
  // Field JSArray::kElementsOffset is initialized later.
  __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ lea(elements_array, Operand(result, JSArray::kSize));
  __ movq(FieldOperand(result, JSArray::kElementsOffset), elements_array);

  // Initialize the fixed array. FixedArray length is stored as a smi.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  // array_size: size of array (smi)
  __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
          FACTORY->fixed_array_map());
  Label not_empty_2, fill_array;
  __ SmiTest(array_size);
  __ j(not_zero, &not_empty_2);
  // Length of the FixedArray is the number of pre-allocated elements even
  // though the actual JSArray has length 0.
  __ Move(FieldOperand(elements_array, FixedArray::kLengthOffset),
          Smi::FromInt(kPreallocatedArrayElements));
  __ jmp(&fill_array);
  __ bind(&not_empty_2);
  // For non-empty JSArrays the length of the FixedArray and the JSArray is the
  // same.
  __ movq(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array: elements array
  // elements_array_end: start of next object
  __ bind(&fill_array);
  if (fill_with_hole) {
    Label loop, entry;
    __ Move(scratch, FACTORY->the_hole_value());
    __ lea(elements_array, Operand(elements_array,
                                   FixedArray::kHeaderSize - kHeapObjectTag));
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(Operand(elements_array, 0), scratch);
    __ addq(elements_array, Immediate(kPointerSize));
    __ bind(&entry);
    __ cmpq(elements_array, elements_array_end);
    __ j(below, &loop);
  }
}


// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code the runtime is called.
// This function assumes the following state:
//   rdi: constructor (built-in Array function)
//   rax: argc
//   rsp[0]: return address
//   rsp[8]: last argument
// This function is used for both construct and normal calls of Array. The
// only difference between handling a construct call and a normal call is that
// for a construct call the constructor function in rdi needs to be preserved
// for entering the generic code. In both cases argc in rax needs to be
// preserved. Both registers are preserved by this code so no need to
// differentiate between a construct call and a normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label* call_generic_code) {
  Label argc_one_or_more, argc_two_or_more;

  // Check for array construction with zero arguments.
  __ testq(rax, rax);
  __ j(not_zero, &argc_one_or_more);

  // Handle construction of an empty array.
  AllocateEmptyJSArray(masm,
                       rdi,
                       rbx,
                       rcx,
                       rdx,
                       r8,
                       kPreallocatedArrayElements,
                       call_generic_code);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->array_function_native(), 1);
  __ movq(rax, rbx);
  __ ret(kPointerSize);

  // Check for one argument. Bail out if the argument is not a smi or if it
  // is negative.
  __ bind(&argc_one_or_more);
  __ cmpq(rax, Immediate(1));
  __ j(not_equal, &argc_two_or_more);
  __ movq(rdx, Operand(rsp, kPointerSize));  // Get the argument from the stack.
  __ JumpUnlessNonNegativeSmi(rdx, call_generic_code);

  // Handle construction of an empty array of a certain size. Bail out if the
  // size is too large to actually allocate an elements array.
  __ SmiCompare(rdx, Smi::FromInt(JSObject::kInitialMaxFastElementArray));
  __ j(greater_equal, call_generic_code);

  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0]: return address
  // rsp[8]: argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  true,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1);
  __ movq(rax, rbx);
  __ ret(2 * kPointerSize);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ movq(rdx, rax);
  __ Integer32ToSmi(rdx, rdx);  // Convert argc to a smi.
  // rax: argc
  // rdx: array_size (smi)
  // rdi: constructor
  // rsp[0]: return address
  // rsp[8]: last argument
  AllocateJSArray(masm,
                  rdi,
                  rdx,
                  rbx,
                  rcx,
                  r8,
                  r9,
                  false,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1);

  // rax: argc
  // rbx: JSArray
  // rcx: elements_array
  // r8: elements_array_end (untagged)
  // rsp[0]: return address
  // rsp[8]: last argument

  // Location of the last argument.
  __ lea(r9, Operand(rsp, kPointerSize));

  // Location of the first array element (parameter fill_with_holes to
  // AllocateJSArray is false, so the FixedArray is returned in rcx).
  __ lea(rdx, Operand(rcx, FixedArray::kHeaderSize - kHeapObjectTag));

  // rax: argc
  // rbx: JSArray
  // rdx: location of the first array element
  // r9: location of the last argument
  // rsp[0]: return address
  // rsp[8]: last argument
  Label loop, entry;
  __ movq(rcx, rax);
  __ jmp(&entry);
  __ bind(&loop);
  __ movq(kScratchRegister, Operand(r9, rcx, times_pointer_size, 0));
  __ movq(Operand(rdx, 0), kScratchRegister);
  __ addq(rdx, Immediate(kPointerSize));
  __ bind(&entry);
  __ decq(rcx);
  __ j(greater_equal, &loop);

  // Remove caller arguments from the stack and return.
  // rax: argc
  // rbx: JSArray
  // rsp[0]: return address
  // rsp[8]: last argument
  __ pop(rcx);
  __ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
  __ push(rcx);
  __ movq(rax, rbx);
  __ ret(0);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code in case the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);
  Handle<Code> array_code =
      masm->isolate()->builtins()->ArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin and internal
    // Array functions which always have a map.
    // Initial map for the builtin Array function should be a map.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, "Unexpected initial map for Array function");
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Handle<Code> generic_construct_stub =
      masm->isolate()->builtins()->JSConstructStubGeneric();
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // TODO(849): implement custom construct stub.
  // Generate a copy of the generic stub for now.
  Generate_JSConstructStubGeneric(masm);
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(rbp);
  __ movq(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ push(r8);
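  // The adaptor frame now holds, below the saved rbp: the ARGUMENTS_ADAPTOR
  // sentinel, the function, and the smi-tagged argument count.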
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movq(rsp, rbp);
  __ pop(rbp);

  // Remove caller arguments from the stack.
  __ pop(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ push(rcx);
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rcx : call kind information
  //  -- rdx : code entry to call
  // -----------------------------------
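  // The adaptor runs whenever the actual argument count differs from the
  // callee's formal parameter count: for example, calling a function declared
  // as function f(a, b) {} with f(1) takes the too_few path below, which
  // fills the missing argument with undefined.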

  Label invoke, dont_adapt_arguments;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label enough, too_few;
  __ cmpq(rax, rbx);
  __ j(less, &too_few);
  __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(r8);
    __ push(Operand(rax, 0));
    __ subq(rax, Immediate(kPointerSize));
    __ cmpq(r8, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(r8);
    __ push(Operand(rdi, 0));
    __ subq(rdi, Immediate(kPointerSize));
    __ cmpq(r8, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incq(r8);
    __ push(kScratchRegister);
    __ cmpq(r8, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Get the loop depth of the stack guard check. This is recorded in
  // a test(rax, depth) instruction right after the call.
  Label stack_check;
  __ movq(rbx, Operand(rsp, 0));  // return address
  __ movzxbq(rbx, Operand(rbx, 1));  // depth

  // Get the loop nesting level at which we allow OSR from the
  // unoptimized code and check if we want to do OSR yet. If not we
  // should perform a stack guard check so we can get interrupts while
  // waiting for on-stack replacement.
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
  __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset));
  __ j(greater, &stack_check);

  // Pass the function to optimize as the argument to the on-stack
  // replacement runtime function.
  __ EnterInternalFrame();
  __ push(rax);
  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  __ LeaveInternalFrame();

  // If the result was -1 it means that we couldn't optimize the
  // function. Just return and continue in the unoptimized version.
  Label skip;
  __ SmiCompare(rax, Smi::FromInt(-1));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  // If we decide not to perform on-stack replacement we perform a
  // stack guard check to enable interrupts.
  __ bind(&stack_check);
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok, Label::kNear);

  StackCheckStub stub;
  __ TailCallStub(&stub);
  __ Abort("Unreachable code: returned from tail call.");
  __ bind(&ok);
  __ ret(0);

  __ bind(&skip);
  // Untag the AST id and push it on the stack.
  __ SmiToInteger32(rax, rax);
  __ push(rax);

  // Generate the code for doing the frame-to-frame translation using
  // the deoptimizer infrastructure.
  Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
  generator.Generate();
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64