// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_X64

#include "src/code-factory.h"
#include "src/codegen.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : called function (only guaranteed when
  //                           extra_args requires it)
  //  -- rsi                 : context
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument (argc == rax)
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ PopReturnAddressTo(kScratchRegister);
    __ Push(rdi);
    __ PushReturnAddressFrom(kScratchRegister);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addp(rax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
}


static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ Push(rdi);
  // Function is also the parameter to the runtime call.
  __ Push(rdi);

  __ CallRuntime(function_id, 1);
  // Restore receiver.
  __ Pop(rdi);
}


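// Loads the unoptimized code attached to rdi's SharedFunctionInfo and
// tail-calls it, jumping past the Code object header (kScratchRegister is
// used as a temporary).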
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ movp(kScratchRegister,
          FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
  __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
  __ jmp(kScratchRegister);
}


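// The preceding runtime call leaves a Code object in rax; tail-call its entry
// point, which starts immediately after the Code header.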
78static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
79 __ leap(rax, FieldOperand(rax, Code::kHeaderSize));
80 __ jmp(rax);
81}
82
83
84void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
85 // Checking whether the queued function is ready for install is optional,
86 // since we come across interrupts and stack checks elsewhere. However,
87 // not checking may delay installing ready functions, and always checking
88 // would be quite expensive. A good compromise is to first check against
89 // stack limit as a cue for an interrupt signal.
90 Label ok;
91 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
92 __ j(above_equal, &ok);
93
94 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
95 GenerateTailCallToReturnedCode(masm);
96
97 __ bind(&ok);
98 GenerateTailCallToSharedCode(masm);
99}
100
101
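// Shared body of the construct stubs below: sets up a construct frame, tries
// an inline new-space allocation of the receiver (optionally with an
// AllocationMemento), falls back to the runtime on failure, and then invokes
// the constructor with the caller's arguments copied onto the stack.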
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  //  -- rbx: allocation site or undefined
  // -----------------------------------

  // Should never create mementos for api functions.
  DCHECK(!is_api_function || !create_memento);

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(rbx);
      __ Push(rbx);
    }

    // Store a smi-tagged arguments count on the stack.
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);

    // Push the function to invoke on the stack.
    __ Push(rdi);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;

      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(masm->isolate());
      __ Move(kScratchRegister, debug_step_in_fp);
      __ cmpp(Operand(kScratchRegister, 0), Immediate(0));
      __ j(not_equal, &rt_call);

      // Verified that the constructor is a JSFunction.
      // Load the initial map and verify that it is in fact a map.
      // rdi: constructor
      __ movp(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
      // Will both indicate a NULL and a Smi
      DCHECK(kSmiTag == 0);
      __ JumpIfSmi(rax, &rt_call);
      // rdi: constructor
      // rax: initial map (if proven valid below)
      __ CmpObjectType(rax, MAP_TYPE, rbx);
      __ j(not_equal, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // rdi: constructor
      // rax: initial map
      __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
      __ j(equal, &rt_call);

      if (!is_api_function) {
        Label allocate;
        // The code below relies on these assumptions.
        STATIC_ASSERT(JSFunction::kNoSlackTracking == 0);
        STATIC_ASSERT(Map::ConstructionCount::kShift +
                      Map::ConstructionCount::kSize == 32);
        // Check if slack tracking is enabled.
        __ movl(rsi, FieldOperand(rax, Map::kBitField3Offset));
        __ shrl(rsi, Immediate(Map::ConstructionCount::kShift));
        __ j(zero, &allocate);  // JSFunction::kNoSlackTracking
        // Decrease generous allocation count.
        __ subl(FieldOperand(rax, Map::kBitField3Offset),
                Immediate(1 << Map::ConstructionCount::kShift));

        __ cmpl(rsi, Immediate(JSFunction::kFinishSlackTracking));
        __ j(not_equal, &allocate);

        __ Push(rax);
        __ Push(rdi);

        __ Push(rdi);  // constructor
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ Pop(rdi);
        __ Pop(rax);
        __ xorl(rsi, rsi);  // JSFunction::kNoSlackTracking

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      __ movzxbp(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
      __ shlp(rdi, Immediate(kPointerSizeLog2));
      if (create_memento) {
        __ addp(rdi, Immediate(AllocationMemento::kSize));
      }
      // rdi: size of new object
      __ Allocate(rdi,
                  rbx,
                  rdi,
                  no_reg,
                  &rt_call,
                  NO_ALLOCATION_FLAGS);
      Factory* factory = masm->isolate()->factory();
      // Allocated the JSObject, now initialize the fields.
      // rax: initial map
      // rbx: JSObject (not HeapObject tagged - the actual address).
      // rdi: start of next object (including memento if create_memento)
      __ movp(Operand(rbx, JSObject::kMapOffset), rax);
      __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
      __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
      __ movp(Operand(rbx, JSObject::kElementsOffset), rcx);
      // Set extra fields in the newly allocated object.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object (including memento if create_memento)
      // rsi: slack tracking counter (non-API function case)
      __ leap(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ cmpl(rsi, Immediate(JSFunction::kNoSlackTracking));
        __ j(equal, &no_inobject_slack_tracking);

        // Allocate object with a slack.
        __ movzxbp(rsi,
                   FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
        __ leap(rsi,
                Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
        // rsi: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ cmpp(rsi, rdi);
          __ Assert(less_equal,
                    kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);
        __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with one pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }
      if (create_memento) {
        __ leap(rsi, Operand(rdi, -AllocationMemento::kSize));
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);

        // Fill in memento fields if necessary.
        // rsi: points to the allocated but uninitialized memento.
        __ Move(Operand(rsi, AllocationMemento::kMapOffset),
                factory->allocation_memento_map());
        // Get the cell or undefined.
        __ movp(rdx, Operand(rsp, kPointerSize*2));
        __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), rdx);
      } else {
        __ InitializeFieldsWithFiller(rcx, rdi, rdx);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      __ orp(rbx, Immediate(kHeapObjectTag));

      // Check if a non-empty properties array is needed.
      // Allocate and initialize a FixedArray if it is.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      // Calculate total properties described map.
      __ movzxbp(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
      __ movzxbp(rcx,
                 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
      __ addp(rdx, rcx);
      // Calculate unused properties past the end of the in-object properties.
      __ movzxbp(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
      __ subp(rdx, rcx);
      // Done if no extra properties are to be allocated.
      __ j(zero, &allocated);
      __ Assert(positive, kPropertyAllocationCountFailed);

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // rbx: JSObject
      // rdi: start of next object (will be start of FixedArray)
      // rdx: number of elements in properties array
      __ Allocate(FixedArray::kHeaderSize,
                  times_pointer_size,
                  rdx,
                  rdi,
                  rax,
                  no_reg,
                  &undo_allocation,
                  RESULT_CONTAINS_TOP);

      // Initialize the FixedArray.
      // rbx: JSObject
      // rdi: FixedArray
      // rdx: number of elements
      // rax: start of next object
      __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
      __ movp(Operand(rdi, HeapObject::kMapOffset), rcx);  // setup the map
      __ Integer32ToSmi(rdx, rdx);
      __ movp(Operand(rdi, FixedArray::kLengthOffset), rdx);  // and length

      // Initialize the fields to undefined.
      // rbx: JSObject
      // rdi: FixedArray
      // rax: start of next object
      // rdx: number of elements
      { Label loop, entry;
        __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
        __ leap(rcx, Operand(rdi, FixedArray::kHeaderSize));
        __ jmp(&entry);
        __ bind(&loop);
        __ movp(Operand(rcx, 0), rdx);
        __ addp(rcx, Immediate(kPointerSize));
        __ bind(&entry);
        __ cmpp(rcx, rax);
        __ j(below, &loop);
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject
      // rbx: JSObject
      // rdi: FixedArray
      __ orp(rdi, Immediate(kHeapObjectTag));  // add the heap tag
      __ movp(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);


      // Continue with JSObject being successfully allocated
      // rbx: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated objects unused properties.
      // rbx: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(rbx);
    }

    // Allocate the new receiver object using the runtime call.
    // rdi: function (constructor)
    __ bind(&rt_call);
    int offset = 0;
    if (create_memento) {
      // Get the cell or allocation site.
      __ movp(rdi, Operand(rsp, kPointerSize*2));
      __ Push(rdi);
      offset = kPointerSize;
    }

    // Must restore rsi (context) and rdi (constructor) before calling runtime.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
    __ movp(rdi, Operand(rsp, offset));
    __ Push(rdi);
    if (create_memento) {
      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
    } else {
      __ CallRuntime(Runtime::kNewObject, 1);
    }
    __ movp(rbx, rax);  // store result in rbx

    // If we ended up using the runtime, and we want a memento, then the
    // runtime call made it for us, and we shouldn't do create count
    // increment.
    Label count_incremented;
    if (create_memento) {
      __ jmp(&count_incremented);
    }

    // New object allocated.
    // rbx: newly allocated object
    __ bind(&allocated);

    if (create_memento) {
      __ movp(rcx, Operand(rsp, kPointerSize*2));
      __ Cmp(rcx, masm->isolate()->factory()->undefined_value());
      __ j(equal, &count_incremented);
      // rcx is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ SmiAddConstant(
          FieldOperand(rcx, AllocationSite::kPretenureCreateCountOffset),
          Smi::FromInt(1));
      __ bind(&count_incremented);
    }

    // Retrieve the function from the stack.
    __ Pop(rdi);

    // Retrieve smi-tagged arguments count from the stack.
    __ movp(rax, Operand(rsp, 0));
    __ SmiToInteger32(rax, rax);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ Push(rbx);
    __ Push(rbx);

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop);

    // Call the function.
    if (is_api_function) {
      __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(rax);
      __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;
    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(rax, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ movp(rax, Operand(rsp, 0));

    // Restore the arguments count and leave the construct frame.
    __ bind(&exit);
    __ movp(rbx, Operand(rsp, kPointerSize));  // Get arguments count.

    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
  __ ret(0);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function (
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver, and
    // register rax and rbx holds the argument count and argument array,
    // while rdi holds the function pointer and rsi the context.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : entry (ignored)
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Load the function context into rsi.
    __ movp(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ Push(rdx);
    __ Push(r8);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, r9);
    // Load the previous frame pointer to access C argument on stack
    __ movp(kScratchRegister, Operand(rbp, 0));
    __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movp(rdi, rdx);
#else  // _WIN64
    // GCC parameters in:
    // rdi : entry (ignored)
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    __ movp(rdi, rsi);
    // rdi : function

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the function and receiver and setup the context.
    __ Push(rdi);
    __ Push(rdx);
    __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, rcx);
    __ movp(rbx, r8);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry);
    __ bind(&loop);
    __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ Push(Operand(kScratchRegister, 0));  // dereference handle
    __ addp(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpp(rcx, rax);
    __ j(not_equal, &loop);

    // Invoke the code.
    if (is_construct) {
      // No type feedback cell is available
      __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
      // Expects rdi to hold function pointer.
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(rax);
      // Function must be in rdi.
      __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
    }
    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


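// Compile the function in rdi lazily via the runtime and tail-call the code
// object it returns.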
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm,
                                 bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ Push(rdi);
  // Function is also the parameter to the runtime call.
  __ Push(rdi);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore receiver.
  __ Pop(rdi);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns.
  __ subp(Operand(rsp, 0), Immediate(5));
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ Popad();
  __ ret(0);
}


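// Expand the even/odd-marking "make code young again" builtins for every code
// age in CODE_AGE_LIST; they all share GenerateMakeCodeYoungAgainCommon above.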
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ Popad();

  // Perform prologue operations usually performed by the young code stub.
  __ PopReturnAddressTo(kScratchRegister);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS Function.
  __ PushReturnAddressFrom(kScratchRegister);

  // Jump to point after the code-age stub.
  __ ret(0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


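// Notify the runtime that a stub deoptimized. All registers are preserved
// around the call since deoptimizing stubs may pass their parameters in
// registers; afterwards control returns to the IC miss continuation that is
// still on the stack.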
719static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
720 SaveFPRegsMode save_doubles) {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100721 // Enter an internal frame.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100722 {
723 FrameScope scope(masm, StackFrame::INTERNAL);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100724
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000725 // Preserve registers across notification, this is important for compiled
726 // stubs that tail call the runtime on deopts passing their parameters in
727 // registers.
728 __ Pushad();
729 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
730 __ Popad();
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100731 // Tear down internal frame.
732 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100733
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000734 __ DropUnderReturnAddress(1); // Ignore state offset
735 __ ret(0); // Return to IC Miss stub, continuation still on stack.
736}
737
738
739void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
740 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
741}
742
743
744void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
745 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100746}
747
748
749static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
750 Deoptimizer::BailoutType type) {
Steve Block1e0659c2011-05-24 12:43:12 +0100751 // Enter an internal frame.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100752 {
753 FrameScope scope(masm, StackFrame::INTERNAL);
Steve Block1e0659c2011-05-24 12:43:12 +0100754
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100755 // Pass the deoptimization type to the runtime system.
756 __ Push(Smi::FromInt(static_cast<int>(type)));
Steve Block1e0659c2011-05-24 12:43:12 +0100757
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100758 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
759 // Tear down internal frame.
760 }
Steve Block1e0659c2011-05-24 12:43:12 +0100761
762 // Get the full codegen state from the stack and untag it.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000763 __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));
Steve Block1e0659c2011-05-24 12:43:12 +0100764
765 // Switch on the state.
Ben Murdoch257744e2011-11-30 15:57:28 +0000766 Label not_no_registers, not_tos_rax;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000767 __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
Ben Murdoch257744e2011-11-30 15:57:28 +0000768 __ j(not_equal, &not_no_registers, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +0100769 __ ret(1 * kPointerSize); // Remove state.
770
771 __ bind(&not_no_registers);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000772 __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
773 __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
Ben Murdoch257744e2011-11-30 15:57:28 +0000774 __ j(not_equal, &not_tos_rax, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +0100775 __ ret(2 * kPointerSize); // Remove state, rax.
776
777 __ bind(&not_tos_rax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000778 __ Abort(kNoCasesLeft);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100779}
780
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000781
Ben Murdochb0fe1622011-05-05 13:52:32 +0100782void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
783 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
784}
785
786
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000787void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
788 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
789}
790
791
Ben Murdochb0fe1622011-05-05 13:52:32 +0100792void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
Steve Block1e0659c2011-05-24 12:43:12 +0100793 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100794}
795
796
Ben Murdochb0fe1622011-05-05 13:52:32 +0100797void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
798 // Stack Layout:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000799 // rsp[0] : Return address
800 // rsp[8] : Argument n
801 // rsp[16] : Argument n-1
Ben Murdochb0fe1622011-05-05 13:52:32 +0100802 // ...
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000803 // rsp[8 * n] : Argument 1
804 // rsp[8 * (n + 1)] : Receiver (function to call)
Ben Murdochb0fe1622011-05-05 13:52:32 +0100805 //
806 // rax contains the number of arguments, n, not counting the receiver.
807 //
808 // 1. Make sure we have at least one argument.
809 { Label done;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000810 __ testp(rax, rax);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100811 __ j(not_zero, &done);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000812 __ PopReturnAddressTo(rbx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100813 __ Push(masm->isolate()->factory()->undefined_value());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000814 __ PushReturnAddressFrom(rbx);
815 __ incp(rax);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100816 __ bind(&done);
817 }
818
819 // 2. Get the function to call (passed as receiver) from the stack, check
820 // if it is a function.
Ben Murdoch589d6972011-11-30 16:04:58 +0000821 Label slow, non_function;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000822 StackArgumentsAccessor args(rsp, rax);
823 __ movp(rdi, args.GetReceiverOperand());
Ben Murdochb0fe1622011-05-05 13:52:32 +0100824 __ JumpIfSmi(rdi, &non_function);
825 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
Ben Murdoch589d6972011-11-30 16:04:58 +0000826 __ j(not_equal, &slow);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100827
828 // 3a. Patch the first argument if necessary when calling a function.
829 Label shift_arguments;
Ben Murdoch589d6972011-11-30 16:04:58 +0000830 __ Set(rdx, 0); // indicate regular JS_FUNCTION
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000831 { Label convert_to_object, use_global_proxy, patch_receiver;
Ben Murdochb0fe1622011-05-05 13:52:32 +0100832 // Change context eagerly in case we need the global receiver.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000833 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +0100834
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100835 // Do not transform the receiver for strict mode functions.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000836 __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100837 __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
838 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
839 __ j(not_equal, &shift_arguments);
840
Ben Murdoch257744e2011-11-30 15:57:28 +0000841 // Do not transform the receiver for natives.
842 // SharedFunctionInfo is already loaded into rbx.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000843 __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset),
844 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
Ben Murdoch257744e2011-11-30 15:57:28 +0000845 __ j(not_zero, &shift_arguments);
846
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000847 // Compute the receiver in sloppy mode.
848 __ movp(rbx, args.GetArgumentOperand(1));
Ben Murdoch257744e2011-11-30 15:57:28 +0000849 __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100850
851 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000852 __ j(equal, &use_global_proxy);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100853 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000854 __ j(equal, &use_global_proxy);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100855
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000856 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
857 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
Ben Murdoch257744e2011-11-30 15:57:28 +0000858 __ j(above_equal, &shift_arguments);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100859
860 __ bind(&convert_to_object);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100861 {
862 // Enter an internal frame in order to preserve argument count.
863 FrameScope scope(masm, StackFrame::INTERNAL);
864 __ Integer32ToSmi(rax, rax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000865 __ Push(rax);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100866
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000867 __ Push(rbx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100868 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000869 __ movp(rbx, rax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100870 __ Set(rdx, 0); // indicate regular JS_FUNCTION
Ben Murdochb0fe1622011-05-05 13:52:32 +0100871
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000872 __ Pop(rax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100873 __ SmiToInteger32(rax, rax);
874 }
875
Ben Murdochb0fe1622011-05-05 13:52:32 +0100876 // Restore the function to rdi.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000877 __ movp(rdi, args.GetReceiverOperand());
Ben Murdoch257744e2011-11-30 15:57:28 +0000878 __ jmp(&patch_receiver, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100879
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000880 __ bind(&use_global_proxy);
881 __ movp(rbx,
882 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
883 __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalProxyOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +0100884
885 __ bind(&patch_receiver);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000886 __ movp(args.GetArgumentOperand(1), rbx);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100887
888 __ jmp(&shift_arguments);
889 }
890
Ben Murdoch589d6972011-11-30 16:04:58 +0000891 // 3b. Check for function proxy.
892 __ bind(&slow);
893 __ Set(rdx, 1); // indicate function proxy
894 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
895 __ j(equal, &shift_arguments);
896 __ bind(&non_function);
897 __ Set(rdx, 2); // indicate non-function
Ben Murdochb0fe1622011-05-05 13:52:32 +0100898
Ben Murdoch589d6972011-11-30 16:04:58 +0000899 // 3c. Patch the first argument when calling a non-function. The
Ben Murdochb0fe1622011-05-05 13:52:32 +0100900 // CALL_NON_FUNCTION builtin expects the non-function callee as
901 // receiver, so overwrite the first argument which will ultimately
902 // become the receiver.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000903 __ movp(args.GetArgumentOperand(1), rdi);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100904
905 // 4. Shift arguments and return address one slot down on the stack
906 // (overwriting the original receiver). Adjust argument count to make
907 // the original first argument the new receiver.
908 __ bind(&shift_arguments);
909 { Label loop;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000910 __ movp(rcx, rax);
911 StackArgumentsAccessor args(rsp, rcx);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100912 __ bind(&loop);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000913 __ movp(rbx, args.GetArgumentOperand(1));
914 __ movp(args.GetArgumentOperand(0), rbx);
915 __ decp(rcx);
916 __ j(not_zero, &loop); // While non-zero.
917 __ DropUnderReturnAddress(1, rbx); // Drop one slot under return address.
918 __ decp(rax); // One fewer argument (first argument is new receiver).
Ben Murdochb0fe1622011-05-05 13:52:32 +0100919 }
920
Ben Murdoch589d6972011-11-30 16:04:58 +0000921 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
922 // or a function proxy via CALL_FUNCTION_PROXY.
923 { Label function, non_proxy;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000924 __ testp(rdx, rdx);
Ben Murdoch589d6972011-11-30 16:04:58 +0000925 __ j(zero, &function);
Steve Block9fac8402011-05-12 15:51:54 +0100926 __ Set(rbx, 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000927 __ cmpp(rdx, Immediate(1));
Ben Murdoch589d6972011-11-30 16:04:58 +0000928 __ j(not_equal, &non_proxy);
929
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000930 __ PopReturnAddressTo(rdx);
931 __ Push(rdi); // re-add proxy object as additional argument
932 __ PushReturnAddressFrom(rdx);
933 __ incp(rax);
Ben Murdoch589d6972011-11-30 16:04:58 +0000934 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
935 __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
936 RelocInfo::CODE_TARGET);
937
938 __ bind(&non_proxy);
939 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
Steve Block44f0eee2011-05-26 01:26:41 +0100940 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
Ben Murdochb0fe1622011-05-05 13:52:32 +0100941 RelocInfo::CODE_TARGET);
942 __ bind(&function);
943 }
944
945 // 5b. Get the code to call from the function and check that the number of
946 // expected arguments matches what we're providing. If so, jump
947 // (tail-call) to the code in register edx without checking arguments.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000948 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
949 __ LoadSharedFunctionInfoSpecialField(rbx, rdx,
950 SharedFunctionInfo::kFormalParameterCountOffset);
951 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
952 __ cmpp(rax, rbx);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100953 __ j(not_equal,
Steve Block44f0eee2011-05-26 01:26:41 +0100954 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
Ben Murdochb0fe1622011-05-05 13:52:32 +0100955 RelocInfo::CODE_TARGET);
956
957 ParameterCount expected(0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000958 __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION, NullCallWrapper());
Ben Murdochb0fe1622011-05-05 13:52:32 +0100959}
960
961
962void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
963 // Stack at entry:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000964 // rsp : return address
965 // rsp[8] : arguments
966 // rsp[16] : receiver ("this")
967 // rsp[24] : function
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100968 {
969 FrameScope frame_scope(masm, StackFrame::INTERNAL);
970 // Stack frame:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000971 // rbp : Old base pointer
972 // rbp[8] : return address
973 // rbp[16] : function arguments
974 // rbp[24] : receiver
975 // rbp[32] : function
976 static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
977 static const int kReceiverOffset = kArgumentsOffset + kPointerSize;
978 static const int kFunctionOffset = kReceiverOffset + kPointerSize;
Ben Murdoch589d6972011-11-30 16:04:58 +0000979
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000980 __ Push(Operand(rbp, kFunctionOffset));
981 __ Push(Operand(rbp, kArgumentsOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100982 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100983
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100984 // Check the stack for overflow. We are not trying to catch
985 // interruptions (e.g. debug break and preemption) here, so the "real stack
986 // limit" is checked.
987 Label okay;
988 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000989 __ movp(rcx, rsp);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100990 // Make rcx the space we have left. The stack might already be overflowed
991 // here which will cause rcx to become negative.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000992 __ subp(rcx, kScratchRegister);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100993 // Make rdx the space we need for the array when it is unrolled onto the
994 // stack.
995 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
996 // Check if the arguments will overflow the stack.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000997 __ cmpp(rcx, rdx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100998 __ j(greater, &okay); // Signed comparison.
Ben Murdochb0fe1622011-05-05 13:52:32 +0100999
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001000 // Out of stack space.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001001 __ Push(Operand(rbp, kFunctionOffset));
1002 __ Push(rax);
1003 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001004 __ bind(&okay);
1005 // End of stack check.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001006
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001007 // Push current index and limit.
1008 const int kLimitOffset =
1009 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
1010 const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001011 __ Push(rax); // limit
1012 __ Push(Immediate(0)); // index
Ben Murdochb0fe1622011-05-05 13:52:32 +01001013
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001014 // Get the receiver.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001015 __ movp(rbx, Operand(rbp, kReceiverOffset));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001016
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001017 // Check that the function is a JS function (otherwise it must be a proxy).
1018 Label push_receiver;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001019 __ movp(rdi, Operand(rbp, kFunctionOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001020 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
1021 __ j(not_equal, &push_receiver);
Ben Murdoch589d6972011-11-30 16:04:58 +00001022
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001023 // Change context eagerly to get the right global object if necessary.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001024 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
Ben Murdoch589d6972011-11-30 16:04:58 +00001025
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001026 // Do not transform the receiver for strict mode functions.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001027 Label call_to_object, use_global_proxy;
1028 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001029 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
1030 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
1031 __ j(not_equal, &push_receiver);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001032
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001033 // Do not transform the receiver for natives.
1034 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
1035 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
1036 __ j(not_equal, &push_receiver);
Ben Murdoch257744e2011-11-30 15:57:28 +00001037
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001038 // Compute the receiver in sloppy mode.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001039 __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
1040 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001041 __ j(equal, &use_global_proxy);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001042 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001043 __ j(equal, &use_global_proxy);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001044
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001045 // If given receiver is already a JavaScript object then there's no
1046 // reason for converting it.
1047 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1048 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
1049 __ j(above_equal, &push_receiver);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001050
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001051 // Convert the receiver to an object.
1052 __ bind(&call_to_object);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001053 __ Push(rbx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001054 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001055 __ movp(rbx, rax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001056 __ jmp(&push_receiver, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001057
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001058 __ bind(&use_global_proxy);
1059 __ movp(rbx,
1060 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
1061 __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalProxyOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001062
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001063 // Push the receiver.
1064 __ bind(&push_receiver);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001065 __ Push(rbx);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001066
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001067 // Copy all arguments from the array to the stack.
1068 Label entry, loop;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001069 Register receiver = LoadDescriptor::ReceiverRegister();
1070 Register key = LoadDescriptor::NameRegister();
1071 __ movp(key, Operand(rbp, kIndexOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001072 __ jmp(&entry);
1073 __ bind(&loop);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001074 __ movp(receiver, Operand(rbp, kArgumentsOffset)); // load arguments
Ben Murdochb0fe1622011-05-05 13:52:32 +01001075
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001076 // Use inline caching to speed up access to arguments.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001077 if (FLAG_vector_ics) {
1078 __ Move(VectorLoadICDescriptor::SlotRegister(), Smi::FromInt(0));
1079 }
1080 Handle<Code> ic = CodeFactory::KeyedLoadIC(masm->isolate()).code();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001081 __ Call(ic, RelocInfo::CODE_TARGET);
1082 // It is important that we do not have a test instruction after the
1083 // call. A test instruction after the call is used to indicate that
1084 // we have generated an inline version of the keyed load. In this
1085 // case, we know that we are not generating a test instruction next.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001086
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001087 // Push the nth argument.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001088 __ Push(rax);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001089
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001090 // Update the index on the stack and in register key.
1091 __ movp(key, Operand(rbp, kIndexOffset));
1092 __ SmiAddConstant(key, key, Smi::FromInt(1));
1093 __ movp(Operand(rbp, kIndexOffset), key);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001094
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001095 __ bind(&entry);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001096 __ cmpp(key, Operand(rbp, kLimitOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001097 __ j(not_equal, &loop);

    // Call the function.
    Label call_proxy;
    ParameterCount actual(rax);
    __ SmiToInteger32(rax, key);
    __ movp(rdi, Operand(rbp, kFunctionOffset));
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &call_proxy);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());

    frame_scope.GenerateLeaveFrame();
    __ ret(3 * kPointerSize);  // remove this, receiver, and arguments

    // Call the function proxy.
    __ bind(&call_proxy);
    __ Push(rdi);  // add function proxy as last argument
    __ incp(rax);
    __ Set(rbx, 0);
    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ ret(3 * kPointerSize);  // remove this, receiver, and arguments
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // The following Smi check catches both a NULL pointer and a Smi, since
    // kSmiTag == 0.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // The following Smi check catches both a NULL pointer and a Smi, since
    // kSmiTag == 0.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1);

  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
    __ cmpp(rdi, rcx);
    __ Assert(equal, kUnexpectedStringFunction);
  }

  // Load the first argument into rax and get rid of the rest
  // (including the receiver).
  StackArgumentsAccessor args(rsp, rax);
  Label no_arguments;
  __ testp(rax, rax);
  __ j(zero, &no_arguments);
  __ movp(rbx, args.GetArgumentOperand(1));
  __ PopReturnAddressTo(rcx);
  __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ movp(rax, rbx);
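
  // In JS terms (illustrative only): new String(a, b, c) uses only its first
  // argument, so the remaining arguments and the receiver are dropped from
  // the stack here before the value is converted to a string.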

  // Lookup the argument in the number to string cache.
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(rax,  // Input.
                             rbx,  // Result.
                             rcx,  // Scratch 1.
                             rdx,  // Scratch 2.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- rbx    : argument converted to string
  //  -- rdi    : constructor function
  //  -- rsp[0] : return address
  // -----------------------------------

  // Allocate a JSValue and put the tagged pointer into rax.
  Label gc_required;
  __ Allocate(JSValue::kSize,
              rax,  // Result.
              rcx,  // New allocation top (we ignore it).
              no_reg,
              &gc_required,
              TAG_OBJECT);

  // Set the map.
  __ LoadGlobalFunctionInitialMap(rdi, rcx);
  if (FLAG_debug_code) {
    __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
            Immediate(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
    __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
    __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ movp(FieldOperand(rax, HeapObject::kMapOffset), rcx);

  // Set properties and elements.
  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rcx);

  // Set the value.
  __ movp(FieldOperand(rax, JSValue::kValueOffset), rbx);

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  // We're done. Return.
  __ ret(0);
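
  // Sketch of the wrapper object built above (fields per the stores above,
  // with JSValue::kSize == 4 * kPointerSize):
  //   [ map | properties: empty_fixed_array | elements: empty_fixed_array |
  //     value: the string in rbx ]
  // i.e. all four pointer-sized fields are initialized before returning.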

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(rax, &convert_argument);
  Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
  __ j(NegateCondition(is_string), &convert_argument);
  __ movp(rbx, rax);
  __ IncrementCounter(counters->string_ctor_string_value(), 1);
  __ jmp(&argument_is_string);

  // Invoke the conversion builtin and put the result into rbx.
  __ bind(&convert_argument);
  __ IncrementCounter(counters->string_ctor_conversions(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdi);  // Preserve the function.
    __ Push(rax);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
    __ Pop(rdi);
  }
  __ movp(rbx, rax);
  __ jmp(&argument_is_string);

  // Load the empty string into rbx, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
  __ PopReturnAddressTo(rcx);
  __ leap(rsp, Operand(rsp, kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ jmp(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ ret(0);
}


static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
                                       Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdi : function (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(rdx, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already have overflowed
  // here, which will make rcx negative.
  __ subp(rcx, rdx);
  // Make rdx the space we need for the array when it is unrolled onto the
  // stack.
  __ movp(rdx, rbx);
  __ shlp(rdx, Immediate(kPointerSizeLog2));
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, rdx);
  __ j(less_equal, stack_overflow);  // Signed comparison.
}
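
// Rough model of the check above (illustrative only, not V8 code):
//   intptr_t space_left   = rsp - real_stack_limit;       // rcx - rdx
//   intptr_t space_needed = expected_args * kPointerSize;  // rbx << 3
//   if (space_left <= space_needed) jump to stack_overflow;  // signed test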


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ pushq(rbp);
  __ movp(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ Push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ Push(r8);
}
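
// Adaptor frame laid out by the pushes above (sketch, higher addresses on
// top):
//   [ return address ]
//   [ saved rbp      ]                   <- rbp
//   [ ARGUMENTS_ADAPTOR sentinel (Smi) ]
//   [ function (rdi) ]
//   [ actual argument count, as Smi ]    <- rsp after this helper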


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movp(rsp, rbp);
  __ popq(rbp);

  // Remove caller arguments from the stack.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
}
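
// Sketch of the stack adjustment above (illustrative only): with rbx holding
// the Smi argument count saved by EnterArgumentsAdaptorFrame,
//   rsp += argument_count * kPointerSize   // drop the caller's arguments
//        + 1 * kPointerSize;               // and the receiver slot
// The return address is popped first and pushed back afterwards so the
// following ret still returns to the caller.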


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdi : function (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label stack_overflow;
  ArgumentsAdaptorStackCheck(masm, &stack_overflow);

  Label enough, too_few;
  __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ cmpp(rax, rbx);
  __ j(less, &too_few);
  __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rax, 0));
    __ subp(rax, Immediate(kPointerSize));
    __ cmpp(r8, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }
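
  // Sketch of the copy above (illustrative only): rax is first pointed at
  // the receiver slot of the caller's frame, then
  //   i = -1;
  //   do { i++; push(*src); src -= kPointerSize; } while (i < expected);
  // pushes the receiver followed by the first `expected` caller arguments;
  // any extra actual arguments are left uncopied in the caller's frame.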

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rdi, 0));
    __ subp(rdi, Immediate(kPointerSize));
    __ cmpp(r8, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incp(r8);
    __ Push(kScratchRegister);
    __ cmpp(r8, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }
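
  // In JS terms (illustrative only): calling a function that declares four
  // parameters with only two actual arguments behaves as if the missing two
  // were passed explicitly as undefined; the copy loop above transfers the
  // receiver and the actual arguments, and the fill loop pads the remaining
  // expected slots with undefined.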

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ int3();
  }
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  Label skip;
  // If the code object is null, just return to the unoptimized code.
  __ cmpp(rax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Load deoptimization data from the code object.
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset.
  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));
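
  // Worked example of the address computation above (numbers illustrative
  // only): for a code object tagged at address C and an OSR pc offset of
  // 0x40 in the deopt data, the lea yields
  //   C - kHeapObjectTag + Code::kHeaderSize + 0x40,
  // i.e. 0x40 bytes past the start of the instruction area.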

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as an indicator that recompilation might be
  // needed.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
         RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ ret(0);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64