blob: 6c4419e08426860fcb4fb8aaecfa2c9a679b8610 [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Steve Blocka7e24c12009-10-30 11:49:00 +00004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_X64
Leon Clarkef7060e22010-06-03 12:02:55 +01006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#include "src/code-factory.h"
8#include "src/codegen.h"
9#include "src/deoptimizer.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010#include "src/full-codegen/full-codegen.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000011
12namespace v8 {
13namespace internal {
14
Ben Murdochb0fe1622011-05-05 13:52:32 +010015
Steve Blocka7e24c12009-10-30 11:49:00 +000016#define __ ACCESS_MASM(masm)
17
Steve Blocka7e24c12009-10-30 11:49:00 +000018
// Generates the adaptor trampoline that routes a JS call into a C++
// builtin identified by |id|. Depending on |extra_args|, the target
// function and/or the new.target are pushed as additional trailing
// arguments before transferring control to the external reference.
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : target
  //  -- rdx                 : new.target
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------
  __ AssertFunction(rdi);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Insert extra arguments below the return address. The return address is
  // temporarily popped into the scratch register so the extras end up
  // directly on top of the existing argument area.
  int num_extra_args = 0;
  if (extra_args != BuiltinExtraArguments::kNone) {
    __ PopReturnAddressTo(kScratchRegister);
    if (extra_args & BuiltinExtraArguments::kTarget) {
      ++num_extra_args;
      __ Push(rdi);
    }
    if (extra_args & BuiltinExtraArguments::kNewTarget) {
      ++num_extra_args;
      __ Push(rdx);
    }
    __ PushReturnAddressFrom(kScratchRegister);
  }

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addp(rax, Immediate(num_extra_args + 1));

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
61
62
// Tail-calls the code object attached to rdi's SharedFunctionInfo (the
// unoptimized/shared code). Clobbers only kScratchRegister, so all
// argument-carrying registers reach the callee untouched.
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ movp(kScratchRegister,
          FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
  // Skip past the Code object header to the first instruction.
  __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
  __ jmp(kScratchRegister);
}
71
// Calls the runtime function |function_id| (with the target function as its
// single argument) and tail-calls the Code object the runtime returns.
// The argument count, target and new target are saved across the runtime
// call and restored before dispatching.
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);
    // Function is also the parameter to the runtime call.
    __ Push(rdi);

    __ CallRuntime(function_id, 1);
    // Stash the returned Code object in rbx so rax can be restored below.
    __ movp(rbx, rax);

    // Restore target function and new target (reverse push order).
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  // Skip the Code header and jump to the first instruction.
  __ leap(rbx, FieldOperand(rbx, Code::kHeaderSize));
  __ jmp(rbx);
}
102
103
// Builtin installed while a function sits in the optimization queue: if the
// optimized code is ready it is installed and tail-called; otherwise the
// shared (unoptimized) code is tail-called.
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
119
120
// Shared body of the [[Construct]] stubs. Builds a CONSTRUCT frame, optionally
// allocates the implicit receiver, copies the caller's arguments onto the
// expression stack, invokes the constructor, applies the ECMA result-selection
// rules, and finally removes the caller's arguments.
//
//   is_api_function          - call via HandleApiCallConstruct instead of
//                              InvokeFunction.
//   create_implicit_receiver - allocate a fresh receiver object (ordinary
//                              constructors); otherwise the hole is pushed.
//   check_derived_construct  - after the frame is left, throw if a derived
//                              constructor returned a non-object (ES6 9.2.2).
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  //  -- rbx: allocation site or undefined
  //  -- rdx: new target
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack: allocation site, then
    // the smi-tagged argument count.
    __ AssertUndefinedOrAllocationSite(rbx);
    __ Push(rbx);
    __ Integer32ToSmi(rcx, rax);
    __ Push(rcx);

    if (create_implicit_receiver) {
      // Allocate the new receiver object; rdi/rdx are caller-saved around
      // the stub call.
      __ Push(rdi);
      __ Push(rdx);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ movp(rbx, rax);
      __ Pop(rdx);
      __ Pop(rdi);

      // ----------- S t a t e -------------
      //  -- rdi: constructor function
      //  -- rbx: newly allocated object
      //  -- rdx: new target
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ SmiToInteger32(rax, Operand(rsp, 0 * kPointerSize));
    }

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(rbx);
      __ Push(rbx);
    } else {
      // No implicit receiver: pass the hole in the receiver slot.
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack, last to first
    // (rcx counts down from argc to 0).
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop);

    // Call the function.
    if (is_api_function) {
      __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(rax);
      __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;
      // If the result is a smi, it is *not* an object in the ECMA sense.
      __ JumpIfSmi(rax, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
      __ j(above_equal, &exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ movp(rax, Operand(rsp, 0));

      // Restore the arguments count and leave the construct frame. The
      // arguments count is stored below the receiver.
      __ bind(&exit);
      __ movp(rbx, Operand(rsp, 1 * kPointerSize));
    } else {
      // No receiver was pushed twice; the smi-tagged argc is on top.
      __ movp(rbx, Operand(rsp, 0));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(rax, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  // Remove caller arguments from the stack and return. rbx still holds the
  // smi-tagged argument count; "+ 1" also drops the receiver.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  if (create_implicit_receiver) {
    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->constructed_objects(), 1);
  }
  __ ret(0);
}
259
260
Leon Clarkee46be812010-01-19 14:06:41 +0000261void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100262 Generate_JSConstructStubHelper(masm, false, true, false);
Leon Clarkee46be812010-01-19 14:06:41 +0000263}
264
265
266void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100267 Generate_JSConstructStubHelper(masm, true, false, false);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000268}
269
270
271void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100272 Generate_JSConstructStubHelper(masm, false, false, false);
273}
274
275
276void Builtins::Generate_JSBuiltinsConstructStubForDerived(
277 MacroAssembler* masm) {
278 Generate_JSConstructStubHelper(masm, false, false, true);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000279}
280
281
// Invoked when something that is not a constructor was used with `new`;
// throws a TypeError via the runtime and does not return.
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(rdi);  // The would-be constructor, for the error message.
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
287
288
// Tells Generate_CheckStackOverflow whether rax holds a smi-tagged or a
// raw (untagged) integer item count.
enum IsTagged { kRaxIsSmiTagged, kRaxIsUntaggedInt };
290
291
// Verifies there is room to push rax items onto the stack; calls the
// runtime to throw a stack-overflow error otherwise.
// Clobbers rcx, r11, kScratchRegister; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        IsTagged rax_is_tagged) {
  // rax : the number of items to be pushed to the stack
  //
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subp(rcx, kScratchRegister);
  // Make r11 the space we need for the array when it is unrolled onto the
  // stack (i.e. item count scaled to bytes, untagging first if needed).
  if (rax_is_tagged == kRaxIsSmiTagged) {
    __ PositiveSmiTimesPowerOfTwoToInteger64(r11, rax, kPointerSizeLog2);
  } else {
    DCHECK(rax_is_tagged == kRaxIsUntaggedInt);
    __ movp(r11, rax);
    __ shlq(r11, Immediate(kPointerSizeLog2));
  }
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, r11);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
324
325
// Shared body of the C++-to-JS entry trampolines. Normalizes the
// platform-specific C calling convention into V8's JS calling convention,
// opens an internal frame, pushes the arguments (dereferencing handles),
// and invokes either the Call or the Construct builtin.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Object* new_target
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver, and
    // register rax and rbx holds the argument count and argument array,
    // while rdi holds the function pointer, rsi the context, and rdx the
    // new.target.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : new_target
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and the receiver onto the stack.
    __ Push(rdx);
    __ Push(r8);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, r9);
    // Load the previous frame pointer to access C argument on stack
    __ movp(kScratchRegister, Operand(rbp, 0));
    __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movp(rdi, rdx);
    // Load the new.target into rdx.
    __ movp(rdx, rcx);
#else  // _WIN64
    // GCC parameters in:
    // rdi : new_target
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    // Free up rdi for the function by parking new_target in r11.
    __ movp(r11, rdi);
    __ movp(rdi, rsi);
    // rdi : function
    // r11 : new_target

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and receiver onto the stack.
    __ Push(rdi);
    __ Push(rdx);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, rcx);
    __ movp(rbx, r8);

    // Load the new.target into rdx.
    __ movp(rdx, r11);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function
    // rdx : new.target

    // Check if we have enough stack space to push all arguments.
    // Expects argument count in rax. Clobbers rcx, r11.
    Generate_CheckStackOverflow(masm, kRaxIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ Push(Operand(kScratchRegister, 0));  // dereference handle
    __ addp(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpp(rcx, rax);
    __ j(not_equal, &loop);

    // Invoke the builtin code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}
457
458
459void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
460 Generate_JSEntryTrampolineHelper(masm, false);
461}
462
463
464void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
465 Generate_JSEntryTrampolineHelper(masm, true);
466}
467
Iain Merrick75681382010-08-19 15:07:18 +0100468
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called
//   o rdx: the new target
//   o rsi: our context
//   o rbp: the caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS function.
  __ Push(rdx);  // Callee's new target.

  // Get the bytecode array from the function object and load the pointer to
  // the first entry into edi (InterpreterBytecodeRegister).
  __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));

  // If the function is being debugged, branch out to pick up the debug copy
  // of the bytecode array instead of the original.
  Label load_debug_bytecode_array, bytecode_array_loaded;
  DCHECK_EQ(Smi::FromInt(0), DebugInfo::uninitialized());
  __ cmpp(FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset),
          Immediate(0));
  __ j(not_equal, &load_debug_bytecode_array);
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rax);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Push bytecode array.
  __ Push(kInterpreterBytecodeArrayRegister);
  // Push zero for bytecode array offset.
  __ Push(Immediate(0));

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ movp(rdx, rsp);
    __ subp(rdx, rcx);
    __ CompareRoot(rdx, Heap::kRealStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ j(always, &loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ Push(rdx);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ subp(rcx, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header, Label::kNear);
  }

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Code aging of the BytecodeArray object.

  // Load accumulator, register file, bytecode offset, dispatch table into
  // registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ movp(kInterpreterRegisterFileRegister, rbp);
  __ addp(kInterpreterRegisterFileRegister,
          Immediate(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ movp(kInterpreterBytecodeOffsetRegister,
          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Dispatch to the first bytecode handler for the function: load the first
  // bytecode, index the dispatch table with it, and call the handler.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  // TODO(rmcilroy): Make dispatch table point to code entrys to avoid
  // untagging and header removal.
  __ addp(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(rbx);

  // Even though the first bytecode handler was called, we will never return.
  __ Abort(kUnexpectedReturnFromBytecodeHandler);

  // Load debug copy of the bytecode array (out-of-line slow path).
  __ bind(&load_debug_bytecode_array);
  Register debug_info = kInterpreterBytecodeArrayRegister;
  __ movp(debug_info, FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset));
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ jmp(&bytecode_array_loaded);
}
587
588
// Tears down the interpreter frame, drops the receiver and arguments, and
// returns to the caller with the result already in rax.
void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in accumulator, which is already in rax.

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments and return. The parameter size (in bytes)
  // comes from the bytecode array.
  __ movl(rbx, FieldOperand(kInterpreterBytecodeArrayRegister,
                            BytecodeArray::kParameterSizeOffset));
  __ PopReturnAddressTo(rcx);
  __ addp(rsp, rbx);
  __ PushReturnAddressFrom(rcx);
  __ ret(0);
}
609
610
// Pushes rax arguments (plus the receiver when |push_receiver|) from the
// memory area at rbx onto the stack, iterating from first to last argument.
// Clobbers rbx and rcx.
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         bool push_receiver) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------

  // Find the address of the last argument (rcx = rbx - count * pointer size).
  __ movp(rcx, rax);
  if (push_receiver) {
    __ addp(rcx, Immediate(1));  // Add one for receiver.
  }

  __ shlp(rcx, Immediate(kPointerSizeLog2));
  __ negp(rcx);
  __ addp(rcx, rbx);

  // Push the arguments, walking rbx down until it passes rcx.
  Label loop_header, loop_check;
  __ j(always, &loop_check);
  __ bind(&loop_header);
  __ Push(Operand(rbx, 0));
  __ subp(rbx, Immediate(kPointerSize));
  __ bind(&loop_check);
  __ cmpp(rbx, rcx);
  __ j(greater, &loop_header, Label::kNear);
}
640
641
// Pushes the receiver and arguments supplied by the interpreter, then
// tail-calls the Call builtin with the given tail-call mode.
// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- rdi : the target to call (can be any Object).
  // -----------------------------------

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push receiver and arguments (push_receiver == true).
  Generate_InterpreterPushArgs(masm, true);

  // Call the target.
  __ PushReturnAddressFrom(kScratchRegister);  // Re-push return address.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}
664
665
// Pushes a receiver slot and the arguments supplied by the interpreter,
// then tail-calls the Construct builtin.
// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push slot for the receiver to be constructed.
  __ Push(Immediate(0));

  // Arguments only; the receiver slot was pushed above.
  Generate_InterpreterPushArgs(masm, false);

  // Push return address in preparation for the tail-call.
  __ PushReturnAddressFrom(kScratchRegister);

  // Call the constructor (rax, rdx, rdi passed on).
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
692
693
// (Re-)enters the bytecode dispatch loop for the interpreted frame currently
// on top of the stack: reloads the interpreter's dedicated registers
// (register file, dispatch table, context, bytecode array, bytecode offset)
// from the frame, then jumps to the handler for the current bytecode.
static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) {
  // Initialize register file register and dispatch table register.
  __ movp(kInterpreterRegisterFileRegister, rbp);
  __ addp(kInterpreterRegisterFileRegister,
          Immediate(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the context from the frame.
  __ movp(kContextRegister,
          Operand(kInterpreterRegisterFileRegister,
                  InterpreterFrameConstants::kContextFromRegisterPointer));

  // Get the bytecode array pointer from the frame.
  __ movp(
      kInterpreterBytecodeArrayRegister,
      Operand(kInterpreterRegisterFileRegister,
              InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rbx);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame (stored as a Smi; untag it
  // to a 32-bit integer index).
  __ movp(
      kInterpreterBytecodeOffsetRegister,
      Operand(kInterpreterRegisterFileRegister,
              InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
  __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
                    kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode: load the byte at the current offset, use
  // it to index the dispatch table, and jump to that handler's entry point
  // (past the Code object header).
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  __ addp(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rbx);
}
738
739
// Notifies the runtime that an interpreted frame was deoptimized with the
// given bailout type, restores the accumulator from the stack and resumes
// execution in the bytecode dispatch loop.
static void Generate_InterpreterNotifyDeoptimizedHelper(
    MacroAssembler* masm, Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // Drop state (we don't use these for interpreter deopts) and pop the
  // accumulator value into the accumulator register and push PC at top
  // of stack (to simulate initial call to bytecode handler in interpreter
  // entry trampoline).
  __ Pop(rbx);                               // Saved PC.
  __ Drop(1);                                // Unused state word.
  __ Pop(kInterpreterAccumulatorRegister);   // Restore accumulator.
  __ Push(rbx);                              // Re-push PC.

  // Enter the bytecode dispatch.
  Generate_EnterBytecodeDispatch(masm);
}
764
765
// Interpreter deopt notification for an eager bailout.
void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
769
770
// Interpreter deopt notification for a soft bailout.
void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
774
775
// Interpreter deopt notification for a lazy bailout.
void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
779
// Entry point that (re-)enters bytecode dispatch for the current interpreted
// frame, e.g. after on-stack replacement back into the interpreter.
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the address of the interpreter entry trampoline as a return address.
  // This simulates the initial call to bytecode handlers in interpreter entry
  // trampoline. The return will never actually be taken, but our stack walker
  // uses this address to determine whether a frame is interpreted.
  __ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline());

  Generate_EnterBytecodeDispatch(masm);
}
789
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000790
// Lazily compiles the function on first call, then tail-calls the code object
// returned by the runtime.
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
794
Ben Murdochb0fe1622011-05-05 13:52:32 +0100795
// Triggers non-concurrent (blocking) optimized compilation, then tail-calls
// the returned code.
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}
800
801
// Triggers concurrent (background) optimized compilation, then tail-calls the
// code returned by the runtime.
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
805
806
// Shared body for the Make*CodeYoungAgain* builtins: calls out to C++ to
// rejuvenate (un-age) the calling code object, then re-executes the patched
// code sequence.
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns. Rewind the return address on the stack by 5 bytes —
  // presumably the length of the short call that invoked this stub
  // (cf. Assembler::kShortCallInstructionLength); TODO confirm.
  __ subp(Operand(rsp, 0), Immediate(5));
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  // First C argument: the (rewound) return address saved above Pushad's
  // register block.
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ Popad();
  __ ret(0);
}
829
830
// Stamps out one pair of code-age builtins (even/odd marking) per age in
// CODE_AGE_LIST; each simply forwards to GenerateMakeCodeYoungAgainCommon.
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
842
843
// Marks the calling code object as executed once via a C++ callout, then
// manually performs the function prologue that the (replaced) young-code stub
// would have performed, and resumes execution after the code-age stub.
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  // Point arg_reg_1 at the start of the call instruction that invoked us.
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ Popad();

  // Perform prologue operations usually performed by the young code stub.
  __ PopReturnAddressTo(kScratchRegister);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS Function.
  __ PushReturnAddressFrom(kScratchRegister);

  // Jump to point after the code-age stub.
  __ ret(0);
}
873
874
// "Executed twice" marking reuses the common rejuvenation path.
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
878
879
// "To be executed once" marking shares the executed-once code path.
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}
883
884
// Notifies the runtime of a stub failure while preserving all registers, then
// returns to the IC miss stub whose continuation is still on the stack.
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ Pushad();
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ Popad();
    // Tear down internal frame.
  }

  __ DropUnderReturnAddress(1);  // Ignore state offset
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}
903
904
// Stub-failure notification without saving FP registers.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
908
909
// Stub-failure notification that also saves/restores FP registers.
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
913
914
// Notifies the runtime of a (full-codegen) deoptimization with the given
// bailout type, then returns to the continuation, dropping the state word —
// and, for the TOS_REG state, restoring the top-of-stack value into rax.
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));

    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));

  // Switch on the state.
  Label not_no_registers, not_tos_rax;
  __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  // State is TOS_REG: reload the saved top-of-stack value into rax before
  // returning.
  __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
  __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
  __ j(not_equal, &not_tos_rax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  __ Abort(kNoCasesLeft);  // Unreachable: no other states exist.
}
946
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000947
// Deopt notification for an eager bailout.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
951
952
// Deopt notification for a soft bailout.
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
956
957
// Deopt notification for a lazy bailout.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
961
962
// static
// Implements the Date.prototype getters (getFullYear, getMonth, ...): reads
// the requested field straight from the JSDate object when the date cache
// stamp is still valid, otherwise falls back to the C++ date-field function.
// Throws a TypeError if the receiver is not a JSDate.
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into rax and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(rax, args.GetReceiverOperand());
    __ JumpIfSmi(rax, &receiver_not_date);
    __ CmpObjectType(rax, JS_DATE_TYPE, rbx);
    __ j(not_equal, &receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    // The primitive time value is always stored directly on the object.
    __ movp(rax, FieldOperand(rax, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      // Cached field: valid only while the object's stamp matches the
      // isolate-wide date cache stamp.
      Label stamp_mismatch;
      __ Load(rdx, ExternalReference::date_cache_stamp(masm->isolate()));
      __ cmpp(rdx, FieldOperand(rax, JSDate::kCacheStampOffset));
      __ j(not_equal, &stamp_mismatch, Label::kNear);
      __ movp(rax, FieldOperand(
                       rax, JSDate::kValueOffset + field_index * kPointerSize));
      __ ret(1 * kPointerSize);
      __ bind(&stamp_mismatch);
    }
    // Slow path: compute the field in C++ (also recomputes the cache).
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2);
    __ Move(arg_reg_1, rax);
    __ Move(arg_reg_2, Smi::FromInt(field_index));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ ret(1 * kPointerSize);

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ EnterFrame(StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowNotDateError);
  }
}
1012
1013
// static
// Implements Function.prototype.apply(thisArg, argArray): normalizes the
// stack into (receiver, thisArg) form and dispatches to the Apply builtin,
// or calls with zero arguments when argArray is null/undefined.
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : argArray
  //  -- rsp[16] : thisArg
  //  -- rsp[24] : receiver
  // -----------------------------------

  // 1. Load receiver into rdi, argArray into rax (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead. Missing arguments default to undefined.
  {
    Label no_arg_array, no_this_arg;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);  // thisArg default
    __ movp(rbx, rdx);                                 // argArray default
    __ movp(rdi, args.GetReceiverOperand());
    __ testp(rax, rax);
    __ j(zero, &no_this_arg, Label::kNear);
    {
      __ movp(rdx, args.GetArgumentOperand(1));  // thisArg
      __ cmpp(rax, Immediate(1));
      __ j(equal, &no_arg_array, Label::kNear);
      __ movp(rbx, args.GetArgumentOperand(2));  // argArray
      __ bind(&no_arg_array);
    }
    __ bind(&no_this_arg);
    __ PopReturnAddressTo(rcx);
    // Pop argc + 1 slots (arguments plus receiver), then push thisArg.
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // ----------- S t a t e -------------
  //  -- rax    : argArray
  //  -- rdi    : receiver
  //  -- rsp[0] : return address
  //  -- rsp[8] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(rdi, &receiver_not_callable, Label::kNear);
  __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsCallable));
  __ j(zero, &receiver_not_callable, Label::kNear);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(rax, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
  __ JumpIfRoot(rax, Heap::kUndefinedValueRootIndex, &no_arguments,
                Label::kNear);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver. Since we did not create a frame for
  // Function.prototype.apply() yet, we use a normal Call builtin here.
  __ bind(&no_arguments);
  {
    __ Set(rax, 0);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(args.GetReceiverOperand(), rdi);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1093
1094
// static
// Implements Function.prototype.call(thisArg, ...args): shifts the arguments
// down one slot so that thisArg becomes the receiver, then tail-calls the
// generic Call builtin.
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // Stack Layout:
  //   rsp[0]           : Return address
  //   rsp[8]           : Argument n
  //   rsp[16]          : Argument n-1
  //  ...
  //   rsp[8 * n]       : Argument 1
  //   rsp[8 * (n + 1)] : Receiver (callable to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument (push undefined as thisArg if
  // none was supplied).
  {
    Label done;
    __ testp(rax, rax);
    __ j(not_zero, &done, Label::kNear);
    __ PopReturnAddressTo(rbx);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(rbx);
    __ incp(rax);
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  {
    StackArgumentsAccessor args(rsp, rax);
    __ movp(rdi, args.GetReceiverOperand());
  }

  // 3. Shift arguments and return address one slot down on the stack
  // (overwriting the original receiver). Adjust argument count to make
  // the original first argument the new receiver.
  {
    Label loop;
    __ movp(rcx, rax);
    StackArgumentsAccessor args(rsp, rcx);
    __ bind(&loop);
    // Move each argument one slot toward the receiver position.
    __ movp(rbx, args.GetArgumentOperand(1));
    __ movp(args.GetArgumentOperand(0), rbx);
    __ decp(rcx);
    __ j(not_zero, &loop);              // While non-zero.
    __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
    __ decp(rax);  // One fewer argument (first argument is new receiver).
  }

  // 4. Call the callable.
  // Since we did not create a frame for Function.prototype.call() yet,
  // we use a normal Call builtin here.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
1146
1147
// Implements Reflect.apply(target, thisArgument, argumentsList): normalizes
// the stack into (receiver=thisArgument) form, verifies the target is
// callable, and dispatches to the Apply builtin.
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : argumentsList
  //  -- rsp[16] : thisArgument
  //  -- rsp[24] : target
  //  -- rsp[32] : receiver
  // -----------------------------------

  // 1. Load target into rdi (if present), argumentsList into rax (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead. Missing arguments default to undefined.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);  // target default
    __ movp(rdx, rdi);                                 // thisArgument default
    __ movp(rbx, rdi);                                 // argumentsList default
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ j(equal, &done, Label::kNear);
    __ movp(rdx, args.GetArgumentOperand(2));  // thisArgument
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(3));  // argumentsList
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    // Pop argc + 1 slots (arguments plus receiver), then push thisArgument.
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // ----------- S t a t e -------------
  //  -- rax    : argumentsList
  //  -- rdi    : target
  //  -- rsp[0] : return address
  //  -- rsp[8] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(rdi, &target_not_callable, Label::kNear);
  __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsCallable));
  __ j(zero, &target_not_callable, Label::kNear);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(args.GetReceiverOperand(), rdi);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1211
1212
// Implements Reflect.construct(target, argumentsList, newTarget): normalizes
// the stack, verifies both target and new.target are constructors, and
// dispatches to the Apply builtin with rdx holding new.target.
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : new.target (optional)
  //  -- rsp[16] : argumentsList
  //  -- rsp[24] : target
  //  -- rsp[32] : receiver
  // -----------------------------------

  // 1. Load target into rdi (if present), argumentsList into rax (if present),
  // new.target into rdx (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push undefined as
  // the receiver instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);  // target default
    __ movp(rdx, rdi);                                 // new.target default
    __ movp(rbx, rdi);                                 // argumentsList default
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ movp(rdx, rdi);                         // new.target defaults to target
    __ j(equal, &done, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(2));  // argumentsList
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movp(rdx, args.GetArgumentOperand(3));  // new.target
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    // Pop argc + 1 slots (arguments plus receiver), then push undefined as
    // the receiver slot for the construct call.
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // ----------- S t a t e -------------
  //  -- rax    : argumentsList
  //  -- rdx    : new.target
  //  -- rdi    : target
  //  -- rsp[0] : return address
  //  -- rsp[8] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(rdi, &target_not_constructor, Label::kNear);
  __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsConstructor));
  __ j(zero, &target_not_constructor, Label::kNear);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
  __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsConstructor));
  __ j(zero, &new_target_not_constructor, Label::kNear);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(args.GetReceiverOperand(), rdi);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(args.GetReceiverOperand(), rdx);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
1293
1294
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001295void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1296 // ----------- S t a t e -------------
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001297 // -- rax : argc
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001298 // -- rsp[0] : return address
1299 // -- rsp[8] : last argument
1300 // -----------------------------------
1301 Label generic_array_code;
1302
1303 // Get the InternalArray function.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001304 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001305
1306 if (FLAG_debug_code) {
1307 // Initial map for the builtin InternalArray functions should be maps.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001308 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001309 // Will both indicate a NULL and a Smi.
1310 STATIC_ASSERT(kSmiTag == 0);
1311 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001312 __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001313 __ CmpObjectType(rbx, MAP_TYPE, rcx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001314 __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001315 }
1316
1317 // Run the native code for the InternalArray function called as a normal
1318 // function.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001319 // tail call a stub
1320 InternalArrayConstructorStub stub(masm->isolate());
1321 __ TailCallStub(&stub);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001322}
1323
1324
// Entry point for the Array constructor called as a normal function: loads
// the Array function from the native context and tail-calls the
// ArrayConstructorStub (with rdx = new.target = the function itself and
// rbx = undefined, i.e. no AllocationSite feedback).
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  __ movp(rdx, rdi);
  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
1354
1355
// static
// Builtin for Math.max / Math.min (selected by |kind|). Scans the arguments
// right-to-left, keeping the current best value tagged in rdx and as a double
// in xmm0, converting non-number arguments with ToNumberStub on the fly.
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Condition const cc = (kind == MathMaxMinKind::kMin) ? below : above;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  // Register whose sign bit is inspected in the ±0 tie-break below: for kMin
  // the incoming value (xmm1), for kMax the accumulator (xmm0).
  XMMRegister const reg = (kind == MathMaxMinKind::kMin) ? xmm1 : xmm0;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in rdx and the double value in xmm0.
  __ LoadRoot(rdx, root_index);
  __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  __ Move(rcx, rax);  // rcx counts the remaining arguments down to zero.

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ testp(rcx, rcx);
    __ j(zero, &done_loop);

    // Load the next parameter tagged value into rbx.
    __ movp(rbx, Operand(rsp, rcx, times_pointer_size, 0));

    // Load the double value of the parameter into xmm1, maybe converting the
    // parameter to a number first using the ToNumberStub if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(rbx, &convert_smi);
    __ JumpIfRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                  Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumberStub to convert it.
      FrameScope scope(masm, StackFrame::INTERNAL);
      // Smi-tag rax/rcx so the GC treats the saved counters as valid tagged
      // values while the stub may allocate.
      __ Integer32ToSmi(rax, rax);
      __ Integer32ToSmi(rcx, rcx);
      __ Push(rax);
      __ Push(rcx);
      __ Push(rdx);
      __ movp(rax, rbx);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ movp(rbx, rax);
      __ Pop(rdx);
      __ Pop(rcx);
      __ Pop(rax);
      {
        // Restore the double accumulator value (xmm0), which the stub call
        // may have clobbered; rdx still holds the tagged accumulator.
        Label restore_smi, done_restore;
        __ JumpIfSmi(rdx, &restore_smi, Label::kNear);
        __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
        __ jmp(&done_restore, Label::kNear);
        __ bind(&restore_smi);
        __ SmiToDouble(xmm0, rdx);
        __ bind(&done_restore);
      }
      __ SmiToInteger32(rcx, rcx);
      __ SmiToInteger32(rax, rax);
    }
    // Re-dispatch: the conversion result is a Smi or HeapNumber now.
    __ jmp(&convert);
    __ bind(&convert_number);
    __ Movsd(xmm1, FieldOperand(rbx, HeapNumber::kValueOffset));
    __ jmp(&done_convert, Label::kNear);
    __ bind(&convert_smi);
    __ SmiToDouble(xmm1, rbx);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left hand
    // side (xmm0) and the next parameter value on the right hand side (xmm1).
    Label compare_equal, compare_nan, compare_swap, done_compare;
    __ Ucomisd(xmm0, xmm1);
    // UCOMISD sets PF on an unordered compare, i.e. when either side is NaN.
    __ j(parity_even, &compare_nan, Label::kNear);
    __ j(cc, &done_compare, Label::kNear);  // Accumulator already wins.
    __ j(equal, &compare_equal, Label::kNear);

    // Result is on the right hand side.
    __ bind(&compare_swap);
    __ Movaps(xmm0, xmm1);
    __ Move(rdx, rbx);
    __ jmp(&done_compare, Label::kNear);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(rdx, Heap::kNanValueRootIndex);
    __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
    __ jmp(&done_compare, Label::kNear);

    // Left and right hand side are equal, check for -0 vs. +0: test the sign
    // bit (MOVMSKPD bit 0) of the side that should win the tie for this kind.
    __ bind(&compare_equal);
    __ Movmskpd(kScratchRegister, reg);
    __ testl(kScratchRegister, Immediate(1));
    __ j(not_zero, &compare_swap);

    __ bind(&done_compare);
    __ decp(rcx);
    __ jmp(&loop);
  }

  __ bind(&done_loop);
  // Drop the arguments and the receiver, then return the accumulator.
  __ PopReturnAddressTo(rcx);
  __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ movp(rax, rdx);
  __ Ret();
}
1467
// static
// Builtin for the Number constructor called as a plain function, i.e.
// Number(value): returns ToNumber(value), or +0 with no arguments.
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Load the first argument into rax and get rid of the rest (including the
  //    receiver).
  Label no_arguments;
  {
    StackArgumentsAccessor args(rsp, rax);
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(1));
    // Pop all arguments plus the receiver, keeping the return address.
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // 2a. Convert the first argument to a number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0 (already in rax: argc == 0 is Smi zero).
  __ bind(&no_arguments);
  __ ret(1 * kPointerSize);  // Also drops the receiver.
}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001500
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001501
// static
// Builtin for 'new Number(value)': converts the first argument to a number
// and wraps it in a JSValue. Falls back to the runtime when new.target
// differs from the constructor (subclassing) or allocation fails.
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rdx                 : new target
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // 2. Load the first argument into rbx and get rid of the rest (including the
  //    receiver). With no arguments the value defaults to Smi zero.
  {
    StackArgumentsAccessor args(rsp, rax);
    Label no_arguments, done;
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ jmp(&done, Label::kNear);
    __ bind(&no_arguments);
    __ Move(rbx, Smi::FromInt(0));
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
  }

  // 3. Make sure rbx is a number (Smi or HeapNumber); otherwise convert it.
  {
    Label done_convert;
    __ JumpIfSmi(rbx, &done_convert);
    __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(equal, &done_convert);
    {
      // The stub call may clobber registers; preserve new target (rdx) and
      // constructor (rdi) across it.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(rdx);
      __ Push(rdi);
      __ Move(rax, rbx);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Move(rbx, rax);
      __ Pop(rdi);
      __ Pop(rdx);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmpp(rdx, rdi);
  __ j(not_equal, &new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);  // the first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    // Store the converted value into the freshly created wrapper.
    __ Pop(FieldOperand(rax, JSValue::kValueOffset));
  }
  __ Ret();
}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001574
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001575
// static
// Builtin for the String constructor called as a plain function, i.e.
// String(value): returns the argument if it is already a string, the symbol
// descriptive string for symbols, otherwise ToString(value); the empty
// string with no arguments.
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Load the first argument into rax and get rid of the rest (including the
  //    receiver).
  Label no_arguments;
  {
    StackArgumentsAccessor args(rsp, rax);
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // 2a. At least one argument, return rax if it's a string, otherwise
  //     dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(rax, &to_string, Label::kNear);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
    __ j(above, &to_string, Label::kNear);   // Non-string, non-symbol.
    __ j(equal, &symbol_descriptive_string, Label::kNear);  // Symbol.
    __ Ret();  // Fall-through: instance type below, i.e. already a string.
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(rax, Heap::kempty_stringRootIndex);
    __ ret(1 * kPointerSize);
  }

  // 3a. Convert rax to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in rax to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ PopReturnAddressTo(rcx);
    __ Push(rax);
    __ PushReturnAddressFrom(rcx);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}
1635
1636
// static
// Builtin for 'new String(value)': converts the first argument to a string
// and wraps it in a JSValue. Falls back to the runtime when new.target
// differs from the constructor (subclassing) or allocation fails.
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rdx                 : new target
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // 2. Load the first argument into rbx and get rid of the rest (including the
  //    receiver). With no arguments the value defaults to the empty string.
  {
    StackArgumentsAccessor args(rsp, rax);
    Label no_arguments, done;
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ jmp(&done, Label::kNear);
    __ bind(&no_arguments);
    __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
  }

  // 3. Make sure rbx is a string; convert it with ToStringStub otherwise.
  {
    Label convert, done_convert;
    __ JumpIfSmi(rbx, &convert, Label::kNear);
    __ CmpObjectType(rbx, FIRST_NONSTRING_TYPE, rcx);
    __ j(below, &done_convert);  // Instance type below ⇒ already a string.
    __ bind(&convert);
    {
      // The stub call may clobber registers; preserve new target (rdx) and
      // constructor (rdi) across it.
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(rdx);
      __ Push(rdi);
      __ Move(rax, rbx);
      __ CallStub(&stub);
      __ Move(rbx, rax);
      __ Pop(rdi);
      __ Pop(rdx);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmpp(rdx, rdi);
  __ j(not_equal, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);  // the first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    // Store the converted value into the freshly created wrapper.
    __ Pop(FieldOperand(rax, JSValue::kValueOffset));
  }
  __ Ret();
}
1709
1710
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001711static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
1712 Label* stack_overflow) {
1713 // ----------- S t a t e -------------
1714 // -- rax : actual number of arguments
1715 // -- rbx : expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001716 // -- rdx : new target (passed through to callee)
1717 // -- rdi : function (passed through to callee)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001718 // -----------------------------------
1719 // Check the stack for overflow. We are not trying to catch
1720 // interruptions (e.g. debug break and preemption) here, so the "real stack
1721 // limit" is checked.
1722 Label okay;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001723 __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001724 __ movp(rcx, rsp);
1725 // Make rcx the space we have left. The stack might already be overflowed
1726 // here which will cause rcx to become negative.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001727 __ subp(rcx, r8);
1728 // Make r8 the space we need for the array when it is unrolled onto the
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001729 // stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001730 __ movp(r8, rbx);
1731 __ shlp(r8, Immediate(kPointerSizeLog2));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001732 // Check if the arguments will overflow the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001733 __ cmpp(rcx, r8);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001734 __ j(less_equal, stack_overflow); // Signed comparison.
1735}
1736
1737
// Builds an arguments adaptor frame on the stack: saved rbp, the
// ARGUMENTS_ADAPTOR sentinel, the function, and the Smi-tagged actual
// argument count.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ pushq(rbp);
  __ movp(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ Push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ Push(r8);
}
1754
1755
// Tears down the arguments adaptor frame built by EnterArgumentsAdaptorFrame
// and removes the caller-pushed arguments (count taken from the frame) plus
// the receiver from the stack. Clobbers rbx and rcx.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movp(rsp, rbp);
  __ popq(rbp);

  // Remove caller arguments from the stack.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  // +1 * kPointerSize accounts for the receiver.
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
}
1770
1771
// Trampoline that adapts the actual argument count (rax) to the callee's
// expected count (rbx): re-pushes the arguments inside an adaptor frame,
// truncating extras or padding with undefined, then calls the function's
// code entry. Skips adaptation entirely for the "don't adapt" sentinel.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdx : new target (passed through to callee)
  //  -- rdi : function (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label enough, too_few;
  __ cmpp(rax, rbx);
  __ j(less, &too_few);
  __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentsAdaptorStackCheck(masm, &stack_overflow);

    // Copy receiver and all expected arguments. rax is repurposed as the
    // source pointer (walking down from the highest caller-frame argument).
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rax, 0));
    __ subp(rax, Immediate(kPointerSize));
    __ cmpp(r8, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);

    // If the function is strong we need to throw an error.
    Label no_strong_error;
    __ movp(kScratchRegister,
            FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(kScratchRegister,
                          SharedFunctionInfo::kStrongModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrongModeBitWithinByte));
    __ j(equal, &no_strong_error, Label::kNear);

    // What we really care about is the required number of arguments.

    if (kPointerSize == kInt32Size) {
      // On x32, length is a Smi — untag it.
      __ movp(
          kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kLengthOffset));
      __ SmiToInteger32(kScratchRegister, kScratchRegister);
    } else {
      // See comment near kLengthOffset in src/objects.h
      __ movsxlq(
          kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kLengthOffset));
      __ shrq(kScratchRegister, Immediate(1));
    }

    __ cmpp(rax, kScratchRegister);
    __ j(greater_equal, &no_strong_error, Label::kNear);

    {
      FrameScope frame(masm, StackFrame::MANUAL);
      EnterArgumentsAdaptorFrame(masm);
      __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments);
    }

    __ bind(&no_strong_error);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentsAdaptorStackCheck(masm, &stack_overflow);

    // Copy receiver and all actual arguments. rdi is temporarily used as the
    // source pointer and restored from the frame afterwards.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rdi, 0));
    __ subp(rdi, Immediate(kPointerSize));
    __ cmpp(r8, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incp(r8);
    __ Push(kScratchRegister);
    __ cmpp(r8, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ movp(rax, rbx);
  // rax : expected number of arguments
  // rdx : new target (passed through to callee)
  // rdi : function (passed through to callee)
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ call(rcx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Dont adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ jmp(rcx);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();  // Unreachable: the runtime call throws.
  }
}
1906
1907
// static
// Builtin implementing Reflect.apply/Function.prototype.apply-style calls:
// flattens argumentsList into stack arguments, then dispatches to Call or
// Construct depending on whether new.target is undefined.
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argumentsList
  //  -- rdi    : target
  //  -- rdx    : new.target (checked to be constructor or undefined)
  //  -- rsp[0] : return address.
  //  -- rsp[8] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  // On exit of this section: rax holds a FixedArray of arguments, rbx its
  // untagged length.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(rax, &create_runtime);

    // Load the map of argumentsList into rcx.
    __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));

    // Load native context into rbx.
    __ movp(rbx, NativeContextOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ cmpp(rcx, ContextOperand(rbx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ j(equal, &create_arguments);
    __ cmpp(rcx, ContextOperand(rbx, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ j(equal, &create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CmpInstanceType(rcx, JS_ARRAY_TYPE);
    __ j(equal, &create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(rdi);
      __ Push(rdx);
      __ Push(rax);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(rdx);
      __ Pop(rdi);
      __ SmiToInteger32(rbx, FieldOperand(rax, FixedArray::kLengthOffset));
    }
    __ jmp(&done_create);

    // Try to create the list from an arguments object. Only valid when the
    // length still matches the backing elements' length (unmodified object).
    __ bind(&create_arguments);
    __ movp(rbx, FieldOperand(rax, JSArgumentsObject::kLengthOffset));
    __ movp(rcx, FieldOperand(rax, JSObject::kElementsOffset));
    __ cmpp(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
    __ j(not_equal, &create_runtime);
    __ SmiToInteger32(rbx, rbx);
    __ movp(rax, rcx);
    __ jmp(&done_create);

    // Try to create the list from a JSArray object. Only packed/holey Smi and
    // packed element kinds are handled here; anything else goes to runtime.
    __ bind(&create_array);
    __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(rcx);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ cmpl(rcx, Immediate(FAST_ELEMENTS));
    __ j(above, &create_runtime);
    __ cmpl(rcx, Immediate(FAST_HOLEY_SMI_ELEMENTS));
    __ j(equal, &create_runtime);
    __ SmiToInteger32(rbx, FieldOperand(rax, JSArray::kLengthOffset));
    __ movp(rax, FieldOperand(rax, JSArray::kElementsOffset));

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
    __ movp(rcx, rsp);
    // Make rcx the space we have left. The stack might already be overflowed
    // here which will cause rcx to become negative.
    __ subp(rcx, kScratchRegister);
    __ sarp(rcx, Immediate(kPointerSizeLog2));  // Convert bytes to slots.
    // Check if the arguments will overflow the stack.
    __ cmpp(rcx, rbx);
    __ j(greater, &done, Label::kNear);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- rdi    : target
  //  -- rax    : args (a FixedArray built from argumentsList)
  //  -- rbx    : len (number of elements to push from args)
  //  -- rdx    : new.target (checked to be constructor or undefined)
  //  -- rsp[0] : return address.
  //  -- rsp[8] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    __ PopReturnAddressTo(r8);
    __ Set(rcx, 0);
    Label done, loop;
    __ bind(&loop);
    __ cmpl(rcx, rbx);
    __ j(equal, &done, Label::kNear);
    __ Push(
        FieldOperand(rax, rcx, times_pointer_size, FixedArray::kHeaderSize));
    __ incl(rcx);
    __ jmp(&loop);
    __ bind(&done);
    __ PushReturnAddressFrom(r8);
    __ Move(rax, rcx);  // rax = actual number of arguments pushed.
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ j(equal, masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
2031
namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// | ...
// | g()'s arg M
// | ...
// | g()'s arg 1
// | g()'s receiver arg
// | g()'s caller pc
// ------- g()'s frame: -------
// | g()'s caller fp      <- fp
// | g()'s context
// | function pointer: g
// | -------------------------
// | ...
// | ...
// | f()'s arg N
// | ...
// | f()'s arg 1
// | f()'s receiver arg
// | f()'s caller pc      <- sp
// ----------------------
//
// |args_reg| holds the number of prepared arguments (untagged);
// scratch1-3 are clobbered.
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if the debugger is not active.
  Label done;
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(masm->isolate());
  __ Move(kScratchRegister, debug_is_active);
  __ cmpb(Operand(kScratchRegister, 0), Immediate(0));
  __ j(not_equal, &done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ Cmp(Operand(rbp, StandardFrameConstants::kMarkerOffset),
           Smi::FromInt(StackFrame::STUB));
    __ j(not_equal, &no_interpreter_frame, Label::kNear);
    __ movp(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ movp(scratch2, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(scratch2, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &no_arguments_adaptor, Label::kNear);

  // Drop arguments adaptor frame and load arguments count.
  __ movp(rbp, scratch2);
  __ SmiToInteger32(
      scratch1, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ jmp(&formal_parameter_count_loaded, Label::kNear);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count
  __ movp(scratch1, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movp(scratch1,
          FieldOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      scratch1, scratch1, SharedFunctionInfo::kFormalParameterCountOffset);

  __ bind(&formal_parameter_count_loaded);

  // Calculate the destination address where we will put the return address
  // after we drop current frame.
  Register new_sp_reg = scratch2;
  __ subp(scratch1, args_reg);
  __ leap(new_sp_reg, Operand(rbp, scratch1, times_pointer_size,
                              StandardFrameConstants::kCallerPCOffset));

  if (FLAG_debug_code) {
    __ cmpp(rsp, new_sp_reg);
    __ Check(below, kStackAccessBelowStackPointer);
  }

  // Copy receiver and return address as well.
  Register count_reg = scratch1;
  __ leap(count_reg, Operand(args_reg, 2));

  // Copy return address from caller's frame to current frame's return address
  // to avoid its trashing and let the following loop copy it to the right
  // place.
  Register tmp_reg = scratch3;
  __ movp(tmp_reg, Operand(rbp, StandardFrameConstants::kCallerPCOffset));
  __ movp(Operand(rsp, 0), tmp_reg);

  // Restore caller's frame pointer now as it could be overwritten by
  // the copying loop.
  __ movp(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

  Operand src(rsp, count_reg, times_pointer_size, 0);
  Operand dst(new_sp_reg, count_reg, times_pointer_size, 0);

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).
  Label loop, entry;
  __ jmp(&entry, Label::kNear);
  __ bind(&loop);
  __ decp(count_reg);
  __ movp(tmp_reg, src);
  __ movp(dst, tmp_reg);
  __ bind(&entry);
  __ cmpp(count_reg, Immediate(0));
  __ j(not_equal, &loop, Label::kNear);

  // Leave current frame.
  __ movp(rsp, new_sp_reg);

  __ bind(&done);
}
}  // namespace
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002157
// static
// Implements the [[Call]] internal method for an ordinary JSFunction callee
// (ES6 section 9.2.1): converts the receiver where required for sloppy-mode,
// non-native functions and then jumps into the function's code via
// InvokeFunctionCode. Class constructors are rejected with a TypeError.
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  // -- rax : the number of arguments (not including the receiver)
  // -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);
  __ AssertFunction(rdi);

  // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testb(FieldOperand(rdx, SharedFunctionInfo::kFunctionKindByteOffset),
           Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ j(not_zero, &class_constructor);

  // ----------- S t a t e -------------
  // -- rax : the number of arguments (not including the receiver)
  // -- rdx : the shared function info.
  // -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  // The native and strict-mode bits live in the same byte (asserted above),
  // so one testb decides whether receiver conversion can be skipped.
  Label done_convert;
  __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
           Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                     (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ j(not_zero, &done_convert);
  {
    // ----------- S t a t e -------------
    // -- rax : the number of arguments (not including the receiver)
    // -- rdx : the shared function info.
    // -- rdi : the function to call (checked to be a JSFunction)
    // -- rsi : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(rcx);
    } else {
      Label convert_to_object, convert_receiver;
      __ movp(rcx, args.GetReceiverOperand());
      __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      // JSReceivers are used as the receiver unmodified.
      __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
      __ j(above_equal, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        // null and undefined become the global proxy; everything else goes
        // through ToObject.
        Label convert_global_proxy;
        __ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy, Label::kNear);
        __ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object,
                         Label::kNear);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(rcx);
        }
        __ jmp(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        // Preserve the (Smi-tagged) argument count and the callee across the
        // stub call.
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ Integer32ToSmi(rax, rax);
        __ Push(rax);
        __ Push(rdi);
        __ movp(rax, rcx);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ movp(rcx, rax);
        __ Pop(rdi);
        __ Pop(rax);
        __ SmiToInteger32(rax, rax);
      }
      // Reload the shared function info; rdx may have been clobbered by the
      // stub call above.
      __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    // Store the (possibly converted) receiver back into the argument slot.
    __ movp(args.GetReceiverOperand(), rcx);
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  // -- rax : the number of arguments (not including the receiver)
  // -- rdx : the shared function info.
  // -- rdi : the function to call (checked to be a JSFunction)
  // -- rsi : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, rax, rbx, rcx, r8);
  }

  __ LoadSharedFunctionInfoSpecialField(
      rbx, rdx, SharedFunctionInfo::kFormalParameterCountOffset);
  ParameterCount actual(rax);
  ParameterCount expected(rbx);

  __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
2277
2278
namespace {

// Pushes the [[BoundArguments]] of the JSBoundFunction in rdi onto the stack,
// between the receiver and the existing arguments, and adjusts the argument
// count in rax accordingly. Shared by the [[Call]] and [[Construct]] paths
// for bound functions. Preserves rdx (new.target) and rdi (target); clobbers
// rbx, rcx and kScratchRegister.
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : the number of arguments (not including the receiver)
  // -- rdx : new.target (only in case of [[Construct]])
  // -- rdi : target (checked to be a JSBoundFunction)
  // -----------------------------------

  // Load [[BoundArguments]] into rcx and length of that into rbx.
  Label no_bound_arguments;
  __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
  __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
  __ testl(rbx, rbx);
  __ j(zero, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    // -- rax : the number of arguments (not including the receiver)
    // -- rdx : new.target (only in case of [[Construct]])
    // -- rdi : target (checked to be a JSBoundFunction)
    // -- rcx : the [[BoundArguments]] (implemented as FixedArray)
    // -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ leap(kScratchRegister, Operand(rbx, times_pointer_size, 0));
      __ subp(rsp, kScratchRegister);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
      __ j(greater, &done, Label::kNear);  // Signed comparison.
      // Restore the stack pointer before throwing.
      __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Adjust effective number of arguments to include return address.
    __ incl(rax);

    // Relocate arguments and return address down the stack, into the space
    // just reserved, leaving a gap above them for the bound arguments.
    {
      Label loop;
      __ Set(rcx, 0);
      // rbx now points at the old location of the lowest stack slot.
      __ leap(rbx, Operand(rsp, rbx, times_pointer_size, 0));
      __ bind(&loop);
      __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
      __ movp(Operand(rsp, rcx, times_pointer_size, 0), kScratchRegister);
      __ incl(rcx);
      __ cmpl(rcx, rax);
      __ j(less, &loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      // rcx/rbx were clobbered above; reload the array and its length.
      __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
      __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
      __ bind(&loop);
      __ decl(rbx);
      __ movp(kScratchRegister, FieldOperand(rcx, rbx, times_pointer_size,
                                             FixedArray::kHeaderSize));
      __ movp(Operand(rsp, rax, times_pointer_size, 0), kScratchRegister);
      // leal (unlike incl) does not touch the flags, so the branch below
      // still tests the result of decl(rbx).
      __ leal(rax, Operand(rax, 1));
      __ j(greater, &loop);
    }

    // Adjust effective number of arguments (rax contains the number of
    // arguments from the call plus return address plus the number of
    // [[BoundArguments]]), so we need to subtract one for the return address.
    __ decl(rax);
  }
  __ bind(&no_bound_arguments);
}

}  // namespace
2362
2363
2364// static
Ben Murdoch097c5b22016-05-18 11:27:45 +01002365void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2366 TailCallMode tail_call_mode) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002367 // ----------- S t a t e -------------
2368 // -- rax : the number of arguments (not including the receiver)
2369 // -- rdi : the function to call (checked to be a JSBoundFunction)
2370 // -----------------------------------
2371 __ AssertBoundFunction(rdi);
2372
Ben Murdoch097c5b22016-05-18 11:27:45 +01002373 if (tail_call_mode == TailCallMode::kAllow) {
2374 PrepareForTailCall(masm, rax, rbx, rcx, r8);
2375 }
2376
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002377 // Patch the receiver to [[BoundThis]].
2378 StackArgumentsAccessor args(rsp, rax);
2379 __ movp(rbx, FieldOperand(rdi, JSBoundFunction::kBoundThisOffset));
2380 __ movp(args.GetReceiverOperand(), rbx);
2381
2382 // Push the [[BoundArguments]] onto the stack.
2383 Generate_PushBoundArguments(masm);
2384
2385 // Call the [[BoundTargetFunction]] via the Call builtin.
2386 __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2387 __ Load(rcx,
2388 ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()));
2389 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
2390 __ jmp(rcx);
2391}
2392
2393
2394// static
Ben Murdoch097c5b22016-05-18 11:27:45 +01002395void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2396 TailCallMode tail_call_mode) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002397 // ----------- S t a t e -------------
2398 // -- rax : the number of arguments (not including the receiver)
2399 // -- rdi : the target to call (can be any Object)
2400 // -----------------------------------
2401 StackArgumentsAccessor args(rsp, rax);
2402
2403 Label non_callable, non_function, non_smi;
2404 __ JumpIfSmi(rdi, &non_callable);
2405 __ bind(&non_smi);
2406 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002407 __ j(equal, masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002408 RelocInfo::CODE_TARGET);
2409 __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002410 __ j(equal, masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002411 RelocInfo::CODE_TARGET);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002412
2413 // Check if target has a [[Call]] internal method.
2414 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2415 Immediate(1 << Map::kIsCallable));
2416 __ j(zero, &non_callable);
2417
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002418 __ CmpInstanceType(rcx, JS_PROXY_TYPE);
2419 __ j(not_equal, &non_function);
2420
Ben Murdoch097c5b22016-05-18 11:27:45 +01002421 // 0. Prepare for tail call if necessary.
2422 if (tail_call_mode == TailCallMode::kAllow) {
2423 PrepareForTailCall(masm, rax, rbx, rcx, r8);
2424 }
2425
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002426 // 1. Runtime fallback for Proxy [[Call]].
2427 __ PopReturnAddressTo(kScratchRegister);
2428 __ Push(rdi);
2429 __ PushReturnAddressFrom(kScratchRegister);
2430 // Increase the arguments size to include the pushed function and the
2431 // existing receiver on the stack.
2432 __ addp(rax, Immediate(2));
2433 // Tail-call to the runtime.
2434 __ JumpToExternalReference(
2435 ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2436
2437 // 2. Call to something else, which might have a [[Call]] internal method (if
2438 // not we raise an exception).
2439 __ bind(&non_function);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002440 // Overwrite the original receiver with the (original) target.
2441 __ movp(args.GetReceiverOperand(), rdi);
2442 // Let the "call_as_function_delegate" take care of the rest.
2443 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
2444 __ Jump(masm->isolate()->builtins()->CallFunction(
Ben Murdoch097c5b22016-05-18 11:27:45 +01002445 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002446 RelocInfo::CODE_TARGET);
2447
2448 // 3. Call to something that is not callable.
2449 __ bind(&non_callable);
2450 {
2451 FrameScope scope(masm, StackFrame::INTERNAL);
2452 __ Push(rdi);
2453 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2454 }
2455}
2456
2457
// static
// Implements [[Construct]] for an ordinary JSFunction: tail-calls the
// function-specific construct stub found on the SharedFunctionInfo.
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : the number of arguments (not including the receiver)
  // -- rdx : the new target (checked to be a constructor)
  // -- rdi : the constructor to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(rdi);

  // Calling convention for function specific ConstructStubs require
  // rbx to contain either an AllocationSite or undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
  // Skip past the Code object header to the first instruction.
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);
}
2478
2479
// static
// Implements [[Construct]] for a JSBoundFunction (ES6 section 9.4.1.2):
// pushes the [[BoundArguments]], fixes up new.target if it was the bound
// function itself, and tail-calls the Construct builtin with the
// [[BoundTargetFunction]].
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : the number of arguments (not including the receiver)
  // -- rdx : the new target (checked to be a constructor)
  // -- rdi : the constructor to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(rdi);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ cmpp(rdi, rdx);
    __ j(not_equal, &done, Label::kNear);
    __ movp(rdx,
            FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Load(rcx, ExternalReference(Builtins::kConstruct, masm->isolate()));
  // Skip past the Code object header to the first instruction.
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);
}
2508
2509
// static
// Implements [[Construct]] for a JSProxy by deferring entirely to the
// runtime: pushes the constructor and new.target as extra arguments and
// tail-calls Runtime::kJSProxyConstruct.
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : the number of arguments (not including the receiver)
  // -- rdi : the constructor to call (checked to be a JSProxy)
  // -- rdx : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(rdi);
  __ Push(rdx);
  __ PushReturnAddressFrom(kScratchRegister);
  // Include the pushed new_target, constructor and the receiver.
  __ addp(rax, Immediate(3));
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
2529
2530
// static
// Generic [[Construct]] dispatcher; the target can be any object.
// Dispatches to the type-specific Construct* builtins for JSFunctions,
// bound functions and proxies, falls back to the "call as constructor"
// delegate for other objects with a [[Construct]] internal method, and
// throws for non-constructors.
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : the number of arguments (not including the receiver)
  // -- rdx : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -- rdi : the constructor to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(rdi, &non_constructor, Label::kNear);

  // Dispatch based on instance type.
  // CmpObjectType leaves the map of rdi in rcx for the checks below.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(equal, masm->isolate()->builtins()->ConstructFunction(),
       RelocInfo::CODE_TARGET);

  // Check if target has a [[Construct]] internal method.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsConstructor));
  __ j(zero, &non_constructor, Label::kNear);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ j(equal, masm->isolate()->builtins()->ConstructBoundFunction(),
       RelocInfo::CODE_TARGET);

  // Only dispatch to proxies after checking whether they are constructors.
  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ j(equal, masm->isolate()->builtins()->ConstructProxy(),
       RelocInfo::CODE_TARGET);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ movp(args.GetReceiverOperand(), rdi);
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
2582
2583
// Walks the prototype chain of |receiver| looking for a constructor whose
// signature (FunctionTemplateInfo, following parent-template links) matches
// the signature of |function_template_info|. Falls through when a compatible
// receiver is found; jumps to |receiver_check_failed| otherwise. Clobbers
// the three scratch registers and kScratchRegister, and advances |receiver|
// along the prototype chain.
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Register scratch0, Register scratch1,
                                    Register scratch2,
                                    Label* receiver_check_failed) {
  Register signature = scratch0;
  Register map = scratch1;
  Register constructor = scratch2;

  // If there is no signature, return the holder.
  __ movp(signature, FieldOperand(function_template_info,
                                  FunctionTemplateInfo::kSignatureOffset));
  __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
  Label receiver_check_passed;
  __ j(equal, &receiver_check_passed, Label::kNear);

  // Walk the prototype chain.
  __ movp(map, FieldOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, kScratchRegister);
  __ CmpInstanceType(kScratchRegister, JS_FUNCTION_TYPE);
  Label next_prototype;
  // Non-JSFunction constructors cannot carry a signature; move on.
  __ j(not_equal, &next_prototype, Label::kNear);

  // Get the constructor's signature.
  Register type = constructor;
  __ movp(type,
          FieldOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ movp(type, FieldOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ cmpp(signature, type);
  __ j(equal, &receiver_check_passed, Label::kNear);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype, Label::kNear);
  __ CmpObjectType(type, FUNCTION_TEMPLATE_INFO_TYPE, kScratchRegister);
  __ j(not_equal, &next_prototype, Label::kNear);

  // Otherwise load the parent function template and iterate.
  __ movp(type,
          FieldOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ jmp(&function_template_loop, Label::kNear);

  // Load the next prototype.
  __ bind(&next_prototype);
  // Only hidden prototypes are followed; otherwise the check fails.
  __ testq(FieldOperand(map, Map::kBitField3Offset),
           Immediate(Map::HasHiddenPrototype::kMask));
  __ j(zero, receiver_check_failed);
  __ movp(receiver, FieldOperand(map, Map::kPrototypeOffset));
  __ movp(map, FieldOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ jmp(&prototype_loop_start, Label::kNear);

  __ bind(&receiver_check_passed);
}
2648
2649
// Entry point for calls that may take the fast-path API handler. Performs
// the compatible-receiver check against the callee's FunctionTemplateInfo
// and then tail-calls the fast handler code; on failure it drops the
// arguments (and receiver) and throws an Illegal Invocation exception.
void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : number of arguments (not including the receiver)
  // -- rdi : callee
  // -- rsi : context
  // -- rsp[0] : return address
  // -- rsp[8] : last argument
  // -- ...
  // -- rsp[rax * 8] : first argument
  // -- rsp[(rax + 1) * 8] : receiver
  // -----------------------------------

  StackArgumentsAccessor args(rsp, rax);

  // Load the FunctionTemplateInfo.
  __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rbx, FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ movp(rcx, args.GetReceiverOperand());
  CompatibleReceiverCheck(masm, rcx, rbx, rdx, r8, r9, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ movp(rdx, FieldOperand(rbx, FunctionTemplateInfo::kCallCodeOffset));
  __ movp(rdx, FieldOperand(rdx, CallHandlerInfo::kFastHandlerOffset));
  __ addp(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rdx);

  // Compatible receiver check failed: pop return address, arguments and
  // receiver and throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  __ PopReturnAddressTo(rbx);
  // rax * 8 bytes of arguments plus one slot for the receiver.
  __ leap(rax, Operand(rax, times_pointer_size, 1 * kPointerSize));
  __ addp(rsp, rax);
  __ PushReturnAddressFrom(rbx);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
  }
}
2692
2693
// Attempts on-stack replacement of the current (unoptimized) function:
// asks the runtime for optimized code and, if available, rewrites the
// return address on the stack so that "returning" resumes execution at
// the OSR entry point of the optimized code.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the unoptimized code.
  __ cmpp(rax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Load deoptimization data from the code object.
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset
  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}
2728
2729
// OSR helper that first consults the stack limit: if the limit has been
// hit (used as an indicator that recompilation might have been requested),
// it calls the StackGuard runtime and then continues with the regular
// OnStackReplacement builtin; otherwise it simply returns.
void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard);
  }
  __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
         RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ ret(0);
}
2745
2746
2747#undef __
2748
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002749} // namespace internal
2750} // namespace v8
Leon Clarkef7060e22010-06-03 12:02:55 +01002751
2752#endif // V8_TARGET_ARCH_X64