blob: 419ee0fa5c9b547098b732f743402a8865fae6b9 [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Steve Blocka7e24c12009-10-30 11:49:00 +00004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_X64
Leon Clarkef7060e22010-06-03 12:02:55 +01006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#include "src/code-factory.h"
8#include "src/codegen.h"
9#include "src/deoptimizer.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010#include "src/full-codegen/full-codegen.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000011
12namespace v8 {
13namespace internal {
14
Ben Murdochb0fe1622011-05-05 13:52:32 +010015
Steve Blocka7e24c12009-10-30 11:49:00 +000016#define __ ACCESS_MASM(masm)
17
Steve Blocka7e24c12009-10-30 11:49:00 +000018
// Builds the trampoline that adapts a JS call into a call to a C++ builtin
// identified by |id|. Optionally pushes the target (rdi) and/or new.target
// (rdx) as extra trailing arguments, per |extra_args|, before jumping to the
// external reference.
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : target
  //  -- rdx                 : new.target
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------
  __ AssertFunction(rdi);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Insert extra arguments. The return address is temporarily popped into
  // kScratchRegister so the extra values land below it on the stack.
  int num_extra_args = 0;
  if (extra_args != BuiltinExtraArguments::kNone) {
    __ PopReturnAddressTo(kScratchRegister);
    if (extra_args & BuiltinExtraArguments::kTarget) {
      ++num_extra_args;
      __ Push(rdi);
    }
    if (extra_args & BuiltinExtraArguments::kNewTarget) {
      ++num_extra_args;
      __ Push(rdx);
    }
    __ PushReturnAddressFrom(kScratchRegister);
  }

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addp(rax, Immediate(num_extra_args + 1));

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
61
62
// Tail-calls the code object attached to the SharedFunctionInfo of the
// function in rdi: loads SFI, then its code, adjusts past the Code header to
// the first instruction, and jumps there. Clobbers kScratchRegister only.
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ movp(kScratchRegister,
          FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
  // Skip the Code object header to reach the entry point (untag via lea).
  __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
  __ jmp(kScratchRegister);
}
71
// Calls the runtime function |function_id| (which takes the target function
// as its single argument and returns a Code object), then tail-calls the
// returned code. Argument count, target, and new.target are saved around the
// runtime call so the callee observes them unchanged.
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);
    // Function is also the parameter to the runtime call.
    __ Push(rdi);

    __ CallRuntime(function_id, 1);
    // The runtime call returns the Code object in rax; stash it in rbx.
    __ movp(rbx, rax);

    // Restore target function and new target.
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiToInteger32(rax, rax);
  }
  // Jump past the Code header to the first instruction of the returned code.
  __ leap(rbx, FieldOperand(rbx, Code::kHeaderSize));
  __ jmp(rbx);
}
102
103
// Entry point for functions whose optimized compilation has been queued.
// Tries to install already-finished optimized code; otherwise falls back to
// the unoptimized shared code.
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);

  // Below the limit: take the slow path, which also installs optimized code
  // if it is ready.
  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
119
120
// Shared body of the JSConstructStub variants.
//   is_api_function         - callee is an API function (no deopt PC offset
//                             is recorded for it).
//   create_implicit_receiver - allocate a fresh receiver object before the
//                             call (ordinary `new F()` path); otherwise the
//                             hole is pushed as the receiver.
//   check_derived_construct - after the call, throw if a derived-class
//                             constructor returned a non-object (ES6 9.2.2).
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rsi: context
  //  -- rdi: constructor function
  //  -- rbx: allocation site or undefined
  //  -- rdx: new target
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(rbx);
    __ Push(rsi);
    __ Push(rbx);
    // Argument count is kept smi-tagged while on the stack.
    __ Integer32ToSmi(rcx, rax);
    __ Push(rcx);

    if (create_implicit_receiver) {
      // Allocate the new receiver object; save rdi/rdx around the stub call.
      __ Push(rdi);
      __ Push(rdx);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ movp(rbx, rax);
      __ Pop(rdx);
      __ Pop(rdi);

      // ----------- S t a t e -------------
      //  -- rdi: constructor function
      //  -- rbx: newly allocated object
      //  -- rdx: new target
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ SmiToInteger32(rax, Operand(rsp, 0 * kPointerSize));
    }

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(rbx);
      __ Push(rbx);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // Loop counts rcx from rax down to 0 (inclusive), copying from the
    // caller's frame above rbp.
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop);

    // Call the function.
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;
      // If the result is a smi, it is *not* an object in the ECMA sense.
      __ JumpIfSmi(rax, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
      __ j(above_equal, &exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ movp(rax, Operand(rsp, 0));

      // Restore the arguments count and leave the construct frame. The
      // arguments count is stored below the receiver.
      __ bind(&exit);
      __ movp(rbx, Operand(rsp, 1 * kPointerSize));
    } else {
      // No implicit receiver: the smi-tagged argument count is on top.
      __ movp(rbx, Operand(rsp, 0));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(rax, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  // Remove caller arguments from the stack and return. rbx holds the
  // smi-tagged argument count; +1 pointer accounts for the receiver.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  if (create_implicit_receiver) {
    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->constructed_objects(), 1);
  }
  __ ret(0);
}
254
255
// Generic construct stub: allocates an implicit receiver, no derived check.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, /*is_api_function=*/false,
                                 /*create_implicit_receiver=*/true,
                                 /*check_derived_construct=*/false);
}
259
260
// Construct stub for API functions: no implicit receiver, no derived check.
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, /*is_api_function=*/true,
                                 /*create_implicit_receiver=*/false,
                                 /*check_derived_construct=*/false);
}
264
265
// Construct stub for builtins: no implicit receiver, no derived check.
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, /*is_api_function=*/false,
                                 /*create_implicit_receiver=*/false,
                                 /*check_derived_construct=*/false);
}
269
270
// Construct stub for derived-class constructors: no implicit receiver, but
// verifies the constructor returned an object (ES6 9.2.2 step 13+).
void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, /*is_api_function=*/false,
                                 /*create_implicit_receiver=*/false,
                                 /*check_derived_construct=*/true);
}
275
276
// Entry point reached when something non-constructable is used with `new`:
// throws a TypeError via the runtime, passing the target (rdi) for the
// error message. Does not return.
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(rdi);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
282
283
// Whether rax holds a smi-tagged or an untagged integer count.
enum IsTagged { kRaxIsSmiTagged, kRaxIsUntaggedInt };


// Clobbers rcx, r11, kScratchRegister; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        IsTagged rax_is_tagged) {
  // rax : the number of items to be pushed to the stack
  //
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subp(rcx, kScratchRegister);
  // Make r11 the space we need for the array when it is unrolled onto the
  // stack.
  if (rax_is_tagged == kRaxIsSmiTagged) {
    __ PositiveSmiTimesPowerOfTwoToInteger64(r11, rax, kPointerSizeLog2);
  } else {
    DCHECK(rax_is_tagged == kRaxIsUntaggedInt);
    __ movp(r11, rax);
    __ shlq(r11, Immediate(kPointerSizeLog2));
  }
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, r11);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space. Throws; does not return to the caller's code path.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
319
320
// Shared body of the JSEntryTrampoline builtins: marshals the five C++
// parameters from the platform calling convention (Win64 vs System V) into
// the JS calling convention, copies the argv handles onto the stack, and
// invokes either Construct or Call depending on |is_construct|.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Object* new_target
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver, and
    // register rax and rbx holds the argument count and argument array,
    // while rdi holds the function pointer, rsi the context, and rdx the
    // new.target.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : new_target
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and the receiver onto the stack.
    __ Push(rdx);
    __ Push(r8);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, r9);
    // Load the previous frame pointer to access C argument on stack
    __ movp(kScratchRegister, Operand(rbp, 0));
    __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movp(rdi, rdx);
    // Load the new.target into rdx.
    __ movp(rdx, rcx);
#else  // _WIN64
    // GCC parameters in:
    // rdi : new_target
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    // Stash new_target in r11 so rdi can take the function.
    __ movp(r11, rdi);
    __ movp(rdi, rsi);
    // rdi : function
    // r11 : new_target

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and receiver onto the stack.
    __ Push(rdi);
    __ Push(rdx);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, rcx);
    __ movp(rbx, r8);

    // Load the new.target into rdx.
    __ movp(rdx, r11);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function
    // rdx : new.target

    // Check if we have enough stack space to push all arguments.
    // Expects argument count in rax. Clobbers rcx, r11.
    Generate_CheckStackOverflow(masm, kRaxIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ Push(Operand(kScratchRegister, 0));  // dereference handle
    __ addp(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpp(rcx, rax);
    __ j(not_equal, &loop);

    // Invoke the builtin code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}
449
450
// Entry trampoline for ordinary calls ([[Call]]).
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, /*is_construct=*/false);
}
454
455
// Entry trampoline for construct calls ([[Construct]]).
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, /*is_construct=*/true);
}
459
// static
// Resumes a suspended JSGeneratorObject. Stores the sent value and resume
// mode into the generator, optionally floods the function for debug
// stepping, then dispatches to either the new-style (Ignition/TurboFan)
// resume path or rebuilds a full-codegen frame and jumps to the recorded
// continuation offset.
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : the value to pass to the generator
  //  -- rbx    : the JSGeneratorObject to resume
  //  -- rdx    : the resume mode (tagged)
  //  -- rsp[0] : return address
  // -----------------------------------
  __ AssertGeneratorObject(rbx);

  // Store input value into generator object.
  __ movp(FieldOperand(rbx, JSGeneratorObject::kInputOffset), rax);
  // Write barrier: rax may be a heap object stored into an old-space object.
  __ RecordWriteField(rbx, JSGeneratorObject::kInputOffset, rax, rcx,
                      kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ movp(FieldOperand(rbx, JSGeneratorObject::kResumeModeOffset), rdx);

  // Load suspended function and context.
  __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));
  __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(masm->isolate());
  Operand step_in_enabled_operand = masm->ExternalOperand(step_in_enabled);
  __ cmpb(step_in_enabled_operand, Immediate(0));
  __ j(equal, &skip_flooding);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);
    __ Push(rdx);
    __ Push(rdi);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(rdx);
    __ Pop(rbx);
    // Reload the function; the runtime call may have moved objects.
    __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset));
  }
  __ bind(&skip_flooding);

  // Pop return address.
  __ PopReturnAddressTo(rax);

  // Push receiver.
  __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset));

  // ----------- S t a t e -------------
  //  -- rax    : return address
  //  -- rbx    : the JSGeneratorObject to resume
  //  -- rdx    : the resume mode (tagged)
  //  -- rdi    : generator function
  //  -- rsi    : generator context
  //  -- rsp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset);
  {
    Label done_loop, loop;
    __ bind(&loop);
    // Count rcx down; carry flags when it underflows past zero.
    __ subl(rcx, Immediate(1));
    __ j(carry, &done_loop, Label::kNear);
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object: a BytecodeArray in the
  // function-data slot means the new-style interpreter path.
  Label old_generator;
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpObjectType(rcx, BYTECODE_ARRAY_TYPE, rcx);
  __ j(not_equal, &old_generator);

  // New-style (ignition/turbofan) generator object.
  {
    __ PushReturnAddressFrom(rax);
    __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ LoadSharedFunctionInfoSpecialField(
        rax, rax, SharedFunctionInfo::kFormalParameterCountOffset);
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ movp(rdx, rbx);
    __ jmp(FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  }

  // Old-style (full-codegen) generator object.
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PushReturnAddressFrom(rax);  // Return address.
    __ Push(rbp);                   // Caller's frame pointer.
    __ Move(rbp, rsp);
    __ Push(rsi);                   // Callee's context.
    __ Push(rdi);                   // Callee's JS Function.

    // Restore the operand stack.
    __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset));
    __ SmiToInteger32(rax, FieldOperand(rsi, FixedArray::kLengthOffset));
    {
      Label done_loop, loop;
      __ Set(rcx, 0);
      __ bind(&loop);
      __ cmpl(rcx, rax);
      __ j(equal, &done_loop, Label::kNear);
      __ Push(
          FieldOperand(rsi, rcx, times_pointer_size, FixedArray::kHeaderSize));
      __ addl(rcx, Immediate(1));
      __ jmp(&loop);
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset),
                Heap::kEmptyFixedArrayRootIndex);

    // Restore context.
    __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset));

    // Resume the generator function at the continuation.
    __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movp(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
    __ SmiToInteger64(
        rcx, FieldOperand(rbx, JSGeneratorObject::kContinuationOffset));
    // rdx = code entry + continuation offset.
    __ leap(rdx, FieldOperand(rdx, rcx, times_1, Code::kHeaderSize));
    // Mark the generator as executing so re-entry is detectable.
    __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset),
            Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
    __ movp(rax, rbx);  // Continuation expects generator object in rax.
    __ jmp(rdx);
  }
}
Iain Merrick75681382010-08-19 15:07:18 +0100600
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000601// Generate code for entering a JS function with the interpreter.
602// On entry to the function the receiver and arguments have been pushed on the
603// stack left to right. The actual argument count matches the formal parameter
604// count expected by the function.
605//
606// The live registers are:
607// o rdi: the JS function object being called
608// o rdx: the new target
609// o rsi: our context
610// o rbp: the caller's frame pointer
611// o rsp: stack pointer (pointing to return address)
612//
Ben Murdoch097c5b22016-05-18 11:27:45 +0100613// The function builds an interpreter frame. See InterpreterFrameConstants in
614// frames.h for its layout.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000615void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
Ben Murdochc5610432016-08-08 18:44:38 +0100616 ProfileEntryHookStub::MaybeCallEntryHook(masm);
617
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000618 // Open a frame scope to indicate that there is a frame on the stack. The
619 // MANUAL indicates that the scope shouldn't actually generate code to set up
620 // the frame (that is done below).
621 FrameScope frame_scope(masm, StackFrame::MANUAL);
622 __ pushq(rbp); // Caller's frame pointer.
623 __ movp(rbp, rsp);
624 __ Push(rsi); // Callee's context.
625 __ Push(rdi); // Callee's JS function.
626 __ Push(rdx); // Callee's new target.
627
Ben Murdochc5610432016-08-08 18:44:38 +0100628 // Get the bytecode array from the function object (or from the DebugInfo if
629 // it is present) and load it into kInterpreterBytecodeArrayRegister.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000630 __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100631 Label load_debug_bytecode_array, bytecode_array_loaded;
632 DCHECK_EQ(Smi::FromInt(0), DebugInfo::uninitialized());
633 __ cmpp(FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset),
634 Immediate(0));
635 __ j(not_equal, &load_debug_bytecode_array);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000636 __ movp(kInterpreterBytecodeArrayRegister,
637 FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100638 __ bind(&bytecode_array_loaded);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000639
Ben Murdochc5610432016-08-08 18:44:38 +0100640 // Check function data field is actually a BytecodeArray object.
641 Label bytecode_array_not_present;
642 __ CompareRoot(kInterpreterBytecodeArrayRegister,
643 Heap::kUndefinedValueRootIndex);
644 __ j(equal, &bytecode_array_not_present);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000645 if (FLAG_debug_code) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000646 __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
647 __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
648 rax);
649 __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
650 }
651
Ben Murdochc5610432016-08-08 18:44:38 +0100652 // Load initial bytecode offset.
653 __ movp(kInterpreterBytecodeOffsetRegister,
654 Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
655
656 // Push bytecode array and Smi tagged bytecode offset.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100657 __ Push(kInterpreterBytecodeArrayRegister);
Ben Murdochc5610432016-08-08 18:44:38 +0100658 __ Integer32ToSmi(rcx, kInterpreterBytecodeOffsetRegister);
659 __ Push(rcx);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100660
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000661 // Allocate the local and temporary register file on the stack.
662 {
663 // Load frame size from the BytecodeArray object.
664 __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
665 BytecodeArray::kFrameSizeOffset));
666
667 // Do a stack check to ensure we don't go over the limit.
668 Label ok;
669 __ movp(rdx, rsp);
670 __ subp(rdx, rcx);
671 __ CompareRoot(rdx, Heap::kRealStackLimitRootIndex);
672 __ j(above_equal, &ok, Label::kNear);
673 __ CallRuntime(Runtime::kThrowStackOverflow);
674 __ bind(&ok);
675
676 // If ok, push undefined as the initial value for all register file entries.
677 Label loop_header;
678 Label loop_check;
679 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
680 __ j(always, &loop_check);
681 __ bind(&loop_header);
682 // TODO(rmcilroy): Consider doing more than one push per loop iteration.
683 __ Push(rdx);
684 // Continue loop if not done.
685 __ bind(&loop_check);
686 __ subp(rcx, Immediate(kPointerSize));
687 __ j(greater_equal, &loop_header, Label::kNear);
688 }
689
Ben Murdochc5610432016-08-08 18:44:38 +0100690 // Load accumulator and dispatch table into registers.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000691 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100692 __ Move(
693 kInterpreterDispatchTableRegister,
694 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000695
696 // Dispatch to the first bytecode handler for the function.
697 __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
698 kInterpreterBytecodeOffsetRegister, times_1, 0));
699 __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
700 times_pointer_size, 0));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000701 __ call(rbx);
Ben Murdochc5610432016-08-08 18:44:38 +0100702 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
Ben Murdoch097c5b22016-05-18 11:27:45 +0100703
Ben Murdochc5610432016-08-08 18:44:38 +0100704 // The return value is in rax.
705
706 // Get the arguments + reciever count.
707 __ movp(rbx, Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
708 __ movl(rbx, FieldOperand(rbx, BytecodeArray::kParameterSizeOffset));
709
710 // Leave the frame (also dropping the register file).
711 __ leave();
712
713 // Drop receiver + arguments and return.
714 __ PopReturnAddressTo(rcx);
715 __ addp(rsp, rbx);
716 __ PushReturnAddressFrom(rcx);
717 __ ret(0);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100718
719 // Load debug copy of the bytecode array.
720 __ bind(&load_debug_bytecode_array);
721 Register debug_info = kInterpreterBytecodeArrayRegister;
722 __ movp(debug_info, FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset));
723 __ movp(kInterpreterBytecodeArrayRegister,
724 FieldOperand(debug_info, DebugInfo::kAbstractCodeIndex));
725 __ jmp(&bytecode_array_loaded);
Ben Murdochc5610432016-08-08 18:44:38 +0100726
727 // If the bytecode array is no longer present, then the underlying function
728 // has been switched to a different kind of code and we heal the closure by
729 // switching the code entry field over to the new code object as well.
730 __ bind(&bytecode_array_not_present);
731 __ leave(); // Leave the frame so we can tail call.
732 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
733 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
734 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
735 __ movp(FieldOperand(rdi, JSFunction::kCodeEntryOffset), rcx);
736 __ RecordWriteCodeEntryField(rdi, rcx, r15);
737 __ jmp(rcx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000738}
739
// Helper that emits code to copy the interpreter's argument region onto the
// machine stack. Arguments live in memory starting at rbx and extend upward;
// they are pushed from rbx downward so they end up on the stack in the same
// order. Clobbers rbx and rcx.
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         bool push_receiver) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------

  // Find the address of the last argument:
  // rcx = rbx - count * kPointerSize (one-past-the-end sentinel for the loop).
  __ movp(rcx, rax);
  if (push_receiver) {
    __ addp(rcx, Immediate(1));  // Add one for receiver.
  }

  __ shlp(rcx, Immediate(kPointerSizeLog2));
  __ negp(rcx);
  __ addp(rcx, rbx);

  // Push the arguments. The loop check is emitted first so that a zero
  // argument count pushes nothing.
  Label loop_header, loop_check;
  __ j(always, &loop_check);
  __ bind(&loop_header);
  __ Push(Operand(rbx, 0));
  __ subp(rbx, Immediate(kPointerSize));
  __ bind(&loop_check);
  __ cmpp(rbx, rcx);
  __ j(greater, &loop_header, Label::kNear);
}
769
// static
// Pushes the interpreter-supplied arguments (including the receiver) onto the
// stack and tail-calls the generic Call builtin with the given tail-call mode.
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- rdi : the target to call (can be any Object).
  // -----------------------------------

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // true: the receiver is pushed along with the arguments.
  Generate_InterpreterPushArgs(masm, true);

  // Call the target.
  __ PushReturnAddressFrom(kScratchRegister);  // Re-push return address.
  // ConvertReceiverMode::kAny: the receiver may be any object, so the Call
  // builtin performs receiver conversion as needed.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}
792
// static
// Pushes a hole for the receiver plus the interpreter-supplied arguments, then
// tail-calls the generic Construct builtin.
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push slot for the receiver to be constructed (filled in by Construct).
  __ Push(Immediate(0));

  // false: no receiver is read from the argument region.
  Generate_InterpreterPushArgs(masm, false);

  // Push return address in preparation for the tail-call.
  __ PushReturnAddressFrom(kScratchRegister);

  // Call the constructor (rax, rdx, rdi passed on).
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
819
// Re-enters the interpreter at the bytecode offset stored in the current
// interpreter frame, after first arranging for returns to land at the proper
// point inside the InterpreterEntryTrampoline.
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ Move(rbx, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  // Bias by the recorded pc offset plus the header, untagging the code object
  // pointer into a raw instruction address.
  __ addp(rbx, Immediate(interpreter_entry_return_pc_offset->value() +
                         Code::kHeaderSize - kHeapObjectTag));
  __ Push(rbx);

  // Initialize dispatch table register.
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the bytecode array pointer from the frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rbx);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame (stored as a Smi).
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
                    kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode: load the byte at the current offset,
  // index into the dispatch table, and jump to the handler.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  __ jmp(rbx);
}
861
// Lazy-compilation entry: before calling the runtime compiler, search the
// SharedFunctionInfo's optimized code map for literals/code already produced
// for this native context, and if found install them on the closure and
// tail-call straight into the cached code.
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : argument count (preserved for callee)
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = rdi;
  Register map = r8;
  Register index = r9;
  __ movp(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ movp(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ SmiToInteger32(index, FieldOperand(map, FixedArray::kLengthOffset));
  // A map shorter than 2 elements has no per-context entries to scan.
  __ cmpl(index, Immediate(2));
  __ j(less, &gotta_call_runtime);

  // Find literals.
  // r14 : native context
  // r9  : length / index
  // r8  : optimized code map
  // rdx : new target
  // rdi : closure
  Register native_context = r14;
  __ movp(native_context, NativeContextOperand());

  // Walk the optimized code map from the end towards the front; each entry
  // holds weak cells, so values may have been cleared (become Smi 0).
  __ bind(&loop_top);
  // Native context match?
  Register temp = r11;
  __ movp(temp, FieldOperand(map, index, times_pointer_size,
                             SharedFunctionInfo::kOffsetToPreviousContext));
  __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset));
  __ cmpp(temp, native_context);
  __ j(not_equal, &loop_bottom);
  // OSR id set to none? (OSR entries are not usable for a plain call.)
  __ movp(temp, FieldOperand(map, index, times_pointer_size,
                             SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  __ SmiToInteger32(temp, temp);
  const int bailout_id = BailoutId::None().ToInt();
  __ cmpl(temp, Immediate(bailout_id));
  __ j(not_equal, &loop_bottom);
  // Literals available? (A Smi here means the weak cell was cleared.)
  __ movp(temp, FieldOperand(map, index, times_pointer_size,
                             SharedFunctionInfo::kOffsetToPreviousLiterals));
  __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(temp, &gotta_call_runtime);

  // Save the literals in the closure.
  __ movp(FieldOperand(closure, JSFunction::kLiteralsOffset), temp);
  __ movp(r15, index);
  __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r15,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

  // Code available?
  Register entry = rcx;
  __ movp(entry, FieldOperand(map, index, times_pointer_size,
                              SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &maybe_call_runtime);

  // Found literals and code. Get them into the closure and return.
  // Convert the code object pointer into the raw entry address.
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));

  Label install_optimized_code_and_tailcall;
  __ bind(&install_optimized_code_and_tailcall);
  __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ RecordWriteCodeEntryField(closure, entry, r15);

  // Link the closure into the optimized function list.
  // rcx : code entry (entry)
  // r14 : native context
  // rdx : new target
  // rdi : closure
  __ movp(rbx,
          ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ movp(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), rbx);
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, rbx, r15,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ movp(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
          closure);
  // Save closure before the write barrier (RecordWriteContextSlot clobbers
  // the value register); restore it afterwards so rdi is intact for the call.
  __ movp(rbx, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, closure, r15,
                            kDontSaveFPRegs);
  __ movp(closure, rbx);
  __ jmp(entry);

  __ bind(&loop_bottom);
  __ subl(index, Immediate(SharedFunctionInfo::kEntryLength));
  __ cmpl(index, Immediate(1));
  __ j(greater, &loop_top);

  // We found neither literals nor code.
  __ jmp(&gotta_call_runtime);

  __ bind(&maybe_call_runtime);

  // Last possibility. Check the context free optimized code map entry.
  __ movp(entry, FieldOperand(map, FixedArray::kHeaderSize +
                                       SharedFunctionInfo::kSharedCodeIndex));
  __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Store code entry in the closure.
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
  __ jmp(&install_optimized_code_and_tailcall);

  __ bind(&try_shared);
  // Is the full code valid? Extract the code kind from the flags word; a
  // BUILTIN kind means the shared code is this lazy-compile stub itself, so
  // installing it would loop — go to the runtime instead.
  __ movp(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ movp(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ movl(rbx, FieldOperand(entry, Code::kFlagsOffset));
  __ andl(rbx, Immediate(Code::KindField::kMask));
  __ shrl(rbx, Immediate(Code::KindField::kShift));
  __ cmpl(rbx, Immediate(Code::BUILTIN));
  __ j(equal, &gotta_call_runtime);
  // Yes, install the full code.
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
  __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ RecordWriteCodeEntryField(closure, entry, r15);
  __ jmp(entry);

  // Nothing cached anywhere: compile via the runtime, then tail-call the
  // returned code (rax/rdx/rdi are preserved for it).
  __ bind(&gotta_call_runtime);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
993
// Compiles the function with baseline code via the runtime, then tail-calls
// the code object the runtime returns.
void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}
Ben Murdochb0fe1622011-05-05 13:52:32 +0100997
// Optimizes the function non-concurrently (on this thread) via the runtime,
// then tail-calls the returned code.
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}
1002
1003
// Kicks off concurrent (background-thread) optimization via the runtime, then
// tail-calls the code the runtime returns.
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
1007
1008
// Shared body of the Make*CodeYoungAgain* builtins: calls the C++
// make_code_young function on the calling code object and then re-executes
// the (now-young) patched prologue.
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns. Rewind the return address by 5 bytes — presumably the
  // short call instruction length (cf. Assembler::kShortCallInstructionLength
  // used in Generate_MarkCodeAsExecutedOnce below); TODO confirm.
  __ subp(Operand(rsp, 0), Immediate(5));
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  // arg_reg_1 = the (rewound) return address, i.e. a pointer into the code
  // object being aged; it sits above the Pushad() save area.
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ Popad();
  __ ret(0);
}
1031
1032
// Stamps out the Make<Age>CodeYoungAgainEvenMarking/OddMarking builtins for
// every age in CODE_AGE_LIST; both marking variants delegate to the common
// generator above. (No comments inside the macro body: '//' would swallow
// the '\' line continuations.)
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
    MacroAssembler* masm) { \
  GenerateMakeCodeYoungAgainCommon(masm); \
} \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
    MacroAssembler* masm) { \
  GenerateMakeCodeYoungAgainCommon(masm); \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1044
1045
// Marks the calling code object as executed once via the C++ helper, then
// manually performs the prologue the code-age stub replaced and returns past
// it into the function body.
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  // Point arg_reg_1 at the start of the call instruction that got us here.
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ Popad();

  // Perform prologue operations usually performed by the young code stub.
  __ PopReturnAddressTo(kScratchRegister);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS Function.
  __ PushReturnAddressFrom(kScratchRegister);

  // Jump to point after the code-age stub.
  __ ret(0);
}
1075
1076
// "Executed twice" marking behaves like making the code young again.
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
1080
1081
// "To be executed once" marking reuses the executed-once path.
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}
1085
1086
// Shared body of the NotifyStubFailure builtins: informs the runtime that a
// stub deoptimized, preserving all registers across the call, then returns to
// the IC miss stub whose continuation is still on the stack.
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ Pushad();
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ Popad();
    // Tear down internal frame.
  }

  __ DropUnderReturnAddress(1);  // Ignore state offset
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}
1105
1106
// NotifyStubFailure variant that does not save floating-point registers.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
1110
1111
// NotifyStubFailure variant that additionally saves floating-point registers.
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
1115
1116
// Shared body of the NotifyDeoptimized builtins: tells the runtime a deopt of
// the given bailout type occurred, then dispatches on the bailout state left
// on the stack to drop the right number of slots (and, for TOS_REGISTER,
// reload the top-of-stack value into rax) before returning.
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));

    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));

  // Switch on the state.
  Label not_no_registers, not_tos_rax;
  __ cmpp(kScratchRegister,
          Immediate(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS)));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  // The interpreter expects the accumulator in rax; the DCHECK pins that
  // register assignment.
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), rax.code());
  __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
  __ cmpp(kScratchRegister,
          Immediate(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));
  __ j(not_equal, &not_tos_rax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  // Any other bailout state is unexpected here.
  __ Abort(kNoCasesLeft);
}
1151
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001152
// Eager-deoptimization notification entry point.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
1156
1157
// Soft-deoptimization notification entry point.
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
1161
1162
// Lazy-deoptimization notification entry point.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
1166
1167
// static
// Fast path for the Date.prototype getters: reads the requested field
// directly from the JSDate object when the date cache stamp is current,
// otherwise falls back to the C++ date-field function. Throws a TypeError if
// the receiver is not a JSDate.
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into rax and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(rax, args.GetReceiverOperand());
    __ JumpIfSmi(rax, &receiver_not_date);
    __ CmpObjectType(rax, JS_DATE_TYPE, rbx);
    __ j(not_equal, &receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    // The raw date value needs no caching — read it directly.
    __ movp(rax, FieldOperand(rax, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      // Cached field: valid only while the isolate's date cache stamp matches
      // the stamp recorded in the object.
      Label stamp_mismatch;
      __ Load(rdx, ExternalReference::date_cache_stamp(masm->isolate()));
      __ cmpp(rdx, FieldOperand(rax, JSDate::kCacheStampOffset));
      __ j(not_equal, &stamp_mismatch, Label::kNear);
      __ movp(rax, FieldOperand(
                       rax, JSDate::kValueOffset + field_index * kPointerSize));
      __ ret(1 * kPointerSize);
      __ bind(&stamp_mismatch);
    }
    // Slow path: compute the field in C++.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2);
    __ Move(arg_reg_1, rax);
    __ Move(arg_reg_2, Smi::FromInt(field_index));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ ret(1 * kPointerSize);

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ EnterFrame(StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowNotDateError);
  }
}
1217
// static
// Implements Function.prototype.apply: normalizes the (receiver, thisArg,
// argArray) stack layout, verifies the receiver is callable, then either
// tail-calls the Apply builtin with the argument array or (for null/undefined
// argArray) the Call builtin with zero arguments.
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : argArray
  //  -- rsp[16] : thisArg
  //  -- rsp[24] : receiver
  // -----------------------------------

  // 1. Load receiver into rdi, argArray into rax (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead. Missing arguments default to undefined.
  {
    Label no_arg_array, no_this_arg;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ movp(rbx, rdx);
    __ movp(rdi, args.GetReceiverOperand());
    __ testp(rax, rax);
    __ j(zero, &no_this_arg, Label::kNear);
    {
      __ movp(rdx, args.GetArgumentOperand(1));  // thisArg
      __ cmpp(rax, Immediate(1));
      __ j(equal, &no_arg_array, Label::kNear);
      __ movp(rbx, args.GetArgumentOperand(2));  // argArray
      __ bind(&no_arg_array);
    }
    __ bind(&no_this_arg);
    __ PopReturnAddressTo(rcx);
    // Drop argc+1 slots (arguments plus receiver), then push thisArg as the
    // new receiver.
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // ----------- S t a t e -------------
  //  -- rax    : argArray
  //  -- rdi    : receiver
  //  -- rsp[0] : return address
  //  -- rsp[8] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(rdi, &receiver_not_callable, Label::kNear);
  __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsCallable));
  __ j(zero, &receiver_not_callable, Label::kNear);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(rax, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
  __ JumpIfRoot(rax, Heap::kUndefinedValueRootIndex, &no_arguments,
                Label::kNear);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver. Since we did not create a frame for
  // Function.prototype.apply() yet, we use a normal Call builtin here.
  __ bind(&no_arguments);
  {
    __ Set(rax, 0);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(args.GetReceiverOperand(), rdi);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1297
1298
// static
// Implements Function.prototype.call: ensures at least one argument exists
// (pushing undefined if not), makes the original first argument the new
// receiver by shifting everything down one slot, and tail-calls the generic
// Call builtin on the original receiver (the callable).
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // Stack Layout:
  //   rsp[0]           : Return address
  //   rsp[8]           : Argument n
  //   rsp[16]          : Argument n-1
  //   ...
  //   rsp[8 * n]       : Argument 1
  //   rsp[8 * (n + 1)] : Receiver (callable to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ testp(rax, rax);
    __ j(not_zero, &done, Label::kNear);
    __ PopReturnAddressTo(rbx);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(rbx);
    __ incp(rax);
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  {
    StackArgumentsAccessor args(rsp, rax);
    __ movp(rdi, args.GetReceiverOperand());
  }

  // 3. Shift arguments and return address one slot down on the stack
  // (overwriting the original receiver). Adjust argument count to make
  // the original first argument the new receiver.
  {
    Label loop;
    __ movp(rcx, rax);
    StackArgumentsAccessor args(rsp, rcx);
    __ bind(&loop);
    // Copy each argument one slot up (towards the receiver position).
    __ movp(rbx, args.GetArgumentOperand(1));
    __ movp(args.GetArgumentOperand(0), rbx);
    __ decp(rcx);
    __ j(not_zero, &loop);  // While non-zero.
    __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
    __ decp(rax);  // One fewer argument (first argument is new receiver).
  }

  // 4. Call the callable.
  // Since we did not create a frame for Function.prototype.call() yet,
  // we use a normal Call builtin here.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
1350
1351
// Implements Reflect.apply: extracts (target, thisArgument, argumentsList)
// from the stack, rewrites the stack to [thisArgument] as receiver, checks
// the target is callable, and tail-calls the Apply builtin.
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : argumentsList
  //  -- rsp[16] : thisArgument
  //  -- rsp[24] : target
  //  -- rsp[32] : receiver
  // -----------------------------------

  // 1. Load target into rdi (if present), argumentsList into rax (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead. Missing arguments default to undefined.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
    __ movp(rdx, rdi);
    __ movp(rbx, rdi);
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ j(equal, &done, Label::kNear);
    __ movp(rdx, args.GetArgumentOperand(2));  // thisArgument
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(3));  // argumentsList
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    // Drop argc+1 slots, push thisArgument as the new receiver.
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // ----------- S t a t e -------------
  //  -- rax    : argumentsList
  //  -- rdi    : target
  //  -- rsp[0] : return address
  //  -- rsp[8] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(rdi, &target_not_callable, Label::kNear);
  __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsCallable));
  __ j(zero, &target_not_callable, Label::kNear);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(args.GetReceiverOperand(), rdi);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1415
1416
// Builtin for Reflect.construct(target, argumentsList[, newTarget]): extracts
// the arguments, verifies that both target and new.target are constructors,
// and tail-calls the Apply builtin to perform the [[Construct]] operation.
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : new.target (optional)
  //  -- rsp[16] : argumentsList
  //  -- rsp[24] : target
  //  -- rsp[32] : receiver
  // -----------------------------------

  // 1. Load target into rdi (if present), argumentsList into rax (if present),
  // new.target into rdx (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    // Default target/new.target/argumentsList to undefined.
    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
    __ movp(rdx, rdi);
    __ movp(rbx, rdi);
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);   // argc == 0.
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ movp(rdx, rdi);                         // new.target defaults to target
    __ j(equal, &done, Label::kNear);   // argc == 1.
    __ movp(rbx, args.GetArgumentOperand(2));  // argumentsList
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);   // argc == 2: new.target stays = target.
    __ movp(rdx, args.GetArgumentOperand(3));  // new.target
    __ bind(&done);
    // Drop argc arguments plus the receiver and push undefined as the
    // receiver for the construct call.
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // ----------- S t a t e -------------
  //  -- rax     : argumentsList
  //  -- rdx     : new.target
  //  -- rdi     : target
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(rdi, &target_not_constructor, Label::kNear);
  __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsConstructor));
  __ j(zero, &target_not_constructor, Label::kNear);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
  __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsConstructor));
  __ j(zero, &new_target_not_constructor, Label::kNear);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    StackArgumentsAccessor args(rsp, 0);
    // Put the offending value in the receiver slot for the error message.
    __ movp(args.GetReceiverOperand(), rdi);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(args.GetReceiverOperand(), rdx);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
1497
1498
// Entry point for the InternalArray function when it is invoked as a normal
// function call; delegates the actual construction to
// InternalArrayConstructorStub.
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // The Smi check will both indicate a NULL and a Smi (kSmiTag == 0 means a
    // cleared/NULL slot looks like a Smi as well).
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
1527
1528
// Entry point for the Array function when it is invoked as a normal function
// call; delegates the actual construction to ArrayConstructorStub.
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // The Smi check will both indicate a NULL and a Smi (kSmiTag == 0).
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // new.target (rdx) is the Array function itself for a plain call.
  __ movp(rdx, rdi);
  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  // NOTE(review): rbx is presumably the AllocationSite feedback slot, passed
  // as undefined here — confirm against ArrayConstructorStub's calling
  // convention.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
1558
1559
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001560// static
// static
// Builtin for Math.max / Math.min over an arbitrary number of arguments.
// Walks the arguments from last to first, converting each to a number as
// needed, and keeps a running extremum in rdx (tagged) / xmm0 (double).
// NaN poisons the result; -0 vs +0 is disambiguated via the sign bit.
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------
  // For kMin we keep the accumulator when it is below the parameter, for kMax
  // when it is above; hence the condition used after the Ucomisd below.
  Condition const cc = (kind == MathMaxMinKind::kMin) ? below : above;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  XMMRegister const reg = (kind == MathMaxMinKind::kMin) ? xmm1 : xmm0;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in rdx and the double value in xmm0.
  __ LoadRoot(rdx, root_index);
  __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  __ Move(rcx, rax);  // rcx counts remaining parameters, from argc down to 0.

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ testp(rcx, rcx);
    __ j(zero, &done_loop);

    // Load the next parameter tagged value into rbx.
    __ movp(rbx, Operand(rsp, rcx, times_pointer_size, 0));

    // Load the double value of the parameter into xmm1, maybe converting the
    // parameter to a number first using the ToNumberStub if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(rbx, &convert_smi);
    __ JumpIfRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                  Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumberStub to convert it.
      // The stub can call back into JS, so preserve the loop state (rax, rcx)
      // as Smis and the current accumulator (rdx) across the call.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Integer32ToSmi(rax, rax);
      __ Integer32ToSmi(rcx, rcx);
      __ Push(rax);
      __ Push(rcx);
      __ Push(rdx);
      __ movp(rax, rbx);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ movp(rbx, rax);
      __ Pop(rdx);
      __ Pop(rcx);
      __ Pop(rax);
      {
        // Restore the double accumulator value (xmm0), which the call may
        // have clobbered, from the preserved tagged value in rdx.
        Label restore_smi, done_restore;
        __ JumpIfSmi(rdx, &restore_smi, Label::kNear);
        __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
        __ jmp(&done_restore, Label::kNear);
        __ bind(&restore_smi);
        __ SmiToDouble(xmm0, rdx);
        __ bind(&done_restore);
      }
      __ SmiToInteger32(rcx, rcx);
      __ SmiToInteger32(rax, rax);
    }
    // The conversion result may itself need re-dispatching (Smi vs. HeapNumber).
    __ jmp(&convert);
    __ bind(&convert_number);
    __ Movsd(xmm1, FieldOperand(rbx, HeapNumber::kValueOffset));
    __ jmp(&done_convert, Label::kNear);
    __ bind(&convert_smi);
    __ SmiToDouble(xmm1, rbx);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left hand
    // side (xmm0) and the next parameter value on the right hand side (xmm1).
    Label compare_equal, compare_nan, compare_swap, done_compare;
    __ Ucomisd(xmm0, xmm1);
    __ j(parity_even, &compare_nan, Label::kNear);  // Unordered => NaN operand.
    __ j(cc, &done_compare, Label::kNear);          // Accumulator already wins.
    __ j(equal, &compare_equal, Label::kNear);      // Need -0/+0 tie-break.

    // Result is on the right hand side.
    __ bind(&compare_swap);
    __ Movaps(xmm0, xmm1);
    __ Move(rdx, rbx);
    __ jmp(&done_compare, Label::kNear);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(rdx, Heap::kNanValueRootIndex);
    __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
    __ jmp(&done_compare, Label::kNear);

    // Left and right hand side are equal, check for -0 vs. +0: inspect the
    // sign bit of the relevant operand (xmm1 for min, xmm0 for max) and swap
    // if it is set.
    __ bind(&compare_equal);
    __ Movmskpd(kScratchRegister, reg);
    __ testl(kScratchRegister, Immediate(1));
    __ j(not_zero, &compare_swap);

    __ bind(&done_compare);
    __ decp(rcx);
    __ jmp(&loop);
  }

  __ bind(&done_loop);
  // Drop all arguments plus the receiver and return the accumulator.
  __ PopReturnAddressTo(rcx);
  __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ movp(rax, rdx);
  __ Ret();
}
1671
1672// static
// Builtin for the Number constructor invoked as a plain function call
// (Number(x)): returns ToNumber(x), or +0 when called without arguments.
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Load the first argument into rax and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    StackArgumentsAccessor args(rsp, rax);
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(1));
    // Drop all arguments plus the receiver.
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // 2a. Convert the first argument to a number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0 (already in rax, since argc == 0 is the
  // Smi zero).
  __ bind(&no_arguments);
  __ ret(1 * kPointerSize);  // Pop only the receiver.
}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001704
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001705
1706// static
// Builtin for the Number constructor invoked with 'new' (new Number(x)):
// converts the first argument to a number and wraps it in a JSValue. Falls
// back to the runtime when new.target differs from the constructor (subclass
// case) or when inline allocation fails.
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rdx                 : new target
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // 2. Load the first argument into rbx (defaulting to Smi 0) and get rid of
  // the rest (including the receiver).
  {
    StackArgumentsAccessor args(rsp, rax);
    Label no_arguments, done;
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ jmp(&done, Label::kNear);
    __ bind(&no_arguments);
    __ Move(rbx, Smi::FromInt(0));
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
  }

  // 3. Make sure rbx is a number (Smi or HeapNumber); otherwise convert it
  // via ToNumberStub, preserving new.target and the constructor across the
  // possible JS re-entry.
  {
    Label done_convert;
    __ JumpIfSmi(rbx, &done_convert);
    __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(equal, &done_convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(rdx);
      __ Push(rdi);
      __ Move(rax, rbx);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Move(rbx, rax);
      __ Pop(rdi);
      __ Pop(rdx);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ (i.e. a subclass
  // construction); if so, go straight to the runtime path.
  Label new_object;
  __ cmpp(rdx, rdi);
  __ j(not_equal, &new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);  // the first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    // Pop the preserved number directly into the new object's value slot.
    __ Pop(FieldOperand(rax, JSValue::kValueOffset));
  }
  __ Ret();
}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001778
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001779
1780// static
// Builtin for the String constructor invoked as a plain function call
// (String(x)): returns ToString(x), the symbol's descriptive string for
// symbol arguments, or the empty string when called without arguments.
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Load the first argument into rax and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    StackArgumentsAccessor args(rsp, rax);
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // 2a. At least one argument, return rax if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(rax, &to_string, Label::kNear);
    // Symbols are the first non-string type, so 'equal' after this compare
    // means "is a symbol" and 'above' means "some other non-string".
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
    __ j(above, &to_string, Label::kNear);
    __ j(equal, &symbol_descriptive_string, Label::kNear);
    __ Ret();  // Already a string: return it unchanged.
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(rax, Heap::kempty_stringRootIndex);
    __ ret(1 * kPointerSize);
  }

  // 3a. Convert rax to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in rax to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ PopReturnAddressTo(rcx);
    __ Push(rax);
    __ PushReturnAddressFrom(rcx);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}
1839
1840
1841// static
// Builtin for the String constructor invoked with 'new' (new String(x)):
// converts the first argument to a string and wraps it in a JSValue. Falls
// back to the runtime when new.target differs from the constructor (subclass
// case) or when inline allocation fails.
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rdx                 : new target
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // 2. Load the first argument into rbx (defaulting to the empty string) and
  // get rid of the rest (including the receiver).
  {
    StackArgumentsAccessor args(rsp, rax);
    Label no_arguments, done;
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ jmp(&done, Label::kNear);
    __ bind(&no_arguments);
    __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
  }

  // 3. Make sure rbx is a string; otherwise convert it via ToStringStub,
  // preserving new.target and the constructor across the possible JS re-entry.
  {
    Label convert, done_convert;
    __ JumpIfSmi(rbx, &convert, Label::kNear);
    __ CmpObjectType(rbx, FIRST_NONSTRING_TYPE, rcx);
    __ j(below, &done_convert);
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(rdx);
      __ Push(rdi);
      __ Move(rax, rbx);
      __ CallStub(&stub);
      __ Move(rbx, rax);
      __ Pop(rdi);
      __ Pop(rdx);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ (i.e. a subclass
  // construction); if so, go straight to the runtime path.
  Label new_object;
  __ cmpp(rdx, rdi);
  __ j(not_equal, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);  // the first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    // Pop the preserved string directly into the new object's value slot.
    __ Pop(FieldOperand(rax, JSValue::kValueOffset));
  }
  __ Ret();
}
1913
1914
// Emits a stack-overflow check for the arguments adaptor: verifies that
// pushing |rbx| expected arguments will not run past the real stack limit,
// jumping to |stack_overflow| if it would. Clobbers rcx and r8.
static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
                                       Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdx : new target (passed through to callee)
  //  -- rdi : function (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subp(rcx, r8);
  // Make r8 the space we need for the array when it is unrolled onto the
  // stack.
  __ movp(r8, rbx);
  __ shlp(r8, Immediate(kPointerSizeLog2));
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, r8);
  __ j(less_equal, stack_overflow);  // Signed comparison.
}
1940
1941
// Sets up an arguments adaptor frame: saves rbp, pushes the adaptor sentinel,
// the function, and the (Smi-tagged) actual argument count.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ pushq(rbp);
  __ movp(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ Push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ Push(r8);
}
1958
1959
// Tears down an arguments adaptor frame and removes the caller's arguments
// (plus receiver) from the stack, keeping the return address on top.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movp(rsp, rbp);
  __ popq(rbp);

  // Remove caller arguments from the stack: rbx arguments plus one slot for
  // the receiver, with the return address re-pushed on top.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
}
1974
Ben Murdochc5610432016-08-08 18:44:38 +01001975// static
// Builtin trampoline that tags the requested size as a Smi and tail-calls the
// runtime to allocate an object of that size in new space.
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : requested object size (untagged)
  //  -- rsp[0] : return address
  // -----------------------------------
  __ Integer32ToSmi(rdx, rdx);
  // Push the Smi-tagged size as the sole runtime argument, keeping the
  // return address on top.
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ PushReturnAddressFrom(rcx);
  // NOTE(review): rsi (the context register) is set to Smi zero here,
  // presumably signalling "no context" to the runtime — confirm.
  __ Move(rsi, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
1988
1989// static
// Builtin trampoline that tags the requested size as a Smi and tail-calls the
// runtime to allocate an object of that size in old space.
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : requested object size (untagged)
  //  -- rsp[0] : return address
  // -----------------------------------
  __ Integer32ToSmi(rdx, rdx);
  // Push the Smi-tagged size and the encoded target-space flag as runtime
  // arguments, keeping the return address on top.
  __ PopReturnAddressTo(rcx);
  __ Push(rdx);
  __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ PushReturnAddressFrom(rcx);
  // NOTE(review): rsi (the context register) is set to Smi zero here,
  // presumably signalling "no context" to the runtime — confirm.
  __ Move(rsi, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
Ben Murdochb0fe1622011-05-05 13:52:32 +01002003
// Adapts a call whose actual argument count differs from the callee's
// expected count: builds an adaptor frame, copies/pads the arguments so the
// callee sees exactly the expected number, then calls the callee's code.
// If the callee doesn't adapt arguments, jumps to its code directly.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdx : new target (passed through to callee)
  //  -- rdi : function (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label enough, too_few;
  __ cmpp(rax, rbx);
  __ j(less, &too_few);
  __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentsAdaptorStackCheck(masm, &stack_overflow);

    // Copy receiver and all expected arguments from the caller's frame
    // (walking downwards from the highest argument address in rax).
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rax, 0));
    __ subp(rax, Immediate(kPointerSize));
    __ cmpp(r8, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);

    EnterArgumentsAdaptorFrame(masm);
    ArgumentsAdaptorStackCheck(masm, &stack_overflow);

    // Copy receiver and all actual arguments (this loop clobbers rdi, which
    // is reloaded from the adaptor frame below).
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rdi, 0));
    __ subp(rdi, Immediate(kPointerSize));
    __ cmpp(r8, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incp(r8);
    __ Push(kScratchRegister);
    __ cmpp(r8, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movp(rdi, Operand(rbp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ movp(rax, rbx);
  // rax : expected number of arguments
  // rdx : new target (passed through to callee)
  // rdi : function (passed through to callee)
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ call(rcx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Dont adapt arguments: jump straight to the callee's code with the
  // original actual argument count in rax.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ jmp(rcx);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();  // Unreachable: the runtime call throws.
  }
}
2104
2105
// static
// Flattens |argumentsList| (rax) into individual stack arguments and then
// dispatches to either the Call or the Construct builtin, depending on
// whether new.target (rdx) is undefined. Used as the common tail for
// apply-like invocations.
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argumentsList
  //  -- rdi    : target
  //  -- rdx    : new.target (checked to be constructor or undefined)
  //  -- rsp[0] : return address.
  //  -- rsp[8] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  // On exit from this scope: rax holds a FixedArray of arguments and
  // rbx holds its length as an untagged int32.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(rax, &create_runtime);

    // Load the map of argumentsList into rcx.
    __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));

    // Load native context into rbx.
    __ movp(rbx, NativeContextOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ cmpp(rcx, ContextOperand(rbx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ j(equal, &create_arguments);
    __ cmpp(rcx, ContextOperand(rbx, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ j(equal, &create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CmpInstanceType(rcx, JS_ARRAY_TYPE);
    __ j(equal, &create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      // Preserve target and new.target across the runtime call.
      __ Push(rdi);
      __ Push(rdx);
      __ Push(rax);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(rdx);
      __ Pop(rdi);
      __ SmiToInteger32(rbx, FieldOperand(rax, FixedArray::kLengthOffset));
    }
    __ jmp(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    // The fast path only applies when the object's length property still
    // matches the length of its elements backing store; otherwise fall back
    // to the runtime.
    __ movp(rbx, FieldOperand(rax, JSArgumentsObject::kLengthOffset));
    __ movp(rcx, FieldOperand(rax, JSObject::kElementsOffset));
    __ cmpp(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
    __ j(not_equal, &create_runtime);
    __ SmiToInteger32(rbx, rbx);
    __ movp(rax, rcx);
    __ jmp(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(rcx);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    // Only FAST_SMI_ELEMENTS and FAST_ELEMENTS arrays take the fast path;
    // anything above FAST_ELEMENTS and holey smi arrays go to the runtime.
    __ cmpl(rcx, Immediate(FAST_ELEMENTS));
    __ j(above, &create_runtime);
    __ cmpl(rcx, Immediate(FAST_HOLEY_SMI_ELEMENTS));
    __ j(equal, &create_runtime);
    __ SmiToInteger32(rbx, FieldOperand(rax, JSArray::kLengthOffset));
    __ movp(rax, FieldOperand(rax, JSArray::kElementsOffset));

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
    __ movp(rcx, rsp);
    // Make rcx the space we have left. The stack might already be overflowed
    // here which will cause rcx to become negative.
    __ subp(rcx, kScratchRegister);
    __ sarp(rcx, Immediate(kPointerSizeLog2));
    // Check if the arguments will overflow the stack.
    __ cmpp(rcx, rbx);
    __ j(greater, &done, Label::kNear);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- rdi    : target
  //  -- rax    : args (a FixedArray built from argumentsList)
  //  -- rbx    : len (number of elements to push from args)
  //  -- rdx    : new.target (checked to be constructor or undefined)
  //  -- rsp[0] : return address.
  //  -- rsp[8] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    // The return address is held in r8 while the arguments are pushed so
    // that it ends up back on top of the stack afterwards.
    __ PopReturnAddressTo(r8);
    __ Set(rcx, 0);
    Label done, loop;
    __ bind(&loop);
    __ cmpl(rcx, rbx);
    __ j(equal, &done, Label::kNear);
    __ Push(
        FieldOperand(rax, rcx, times_pointer_size, FixedArray::kHeaderSize));
    __ incl(rcx);
    __ jmp(&loop);
    __ bind(&done);
    __ PushReturnAddressFrom(r8);
    // rax now carries the actual number of pushed arguments, as expected by
    // the Call/Construct builtins below.
    __ Move(rax, rcx);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ j(equal, masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
2229
namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if ES2015 tail call elimination is not enabled (checked via
// the isolate flag read below).
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg
// |  f()'s caller pc      <- sp
// ----------------------
//
// args_reg holds the number of arguments prepared for the callee (f); the
// three scratch registers are clobbered.
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is active.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ Move(kScratchRegister, is_tail_call_elimination_enabled);
  __ cmpb(Operand(kScratchRegister, 0), Immediate(0));
  __ j(equal, &done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ Cmp(Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset),
           Smi::FromInt(StackFrame::STUB));
    __ j(not_equal, &no_interpreter_frame, Label::kNear);
    __ movp(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ movp(scratch2, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &no_arguments_adaptor, Label::kNear);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ movp(rbp, scratch2);
  __ SmiToInteger32(
      caller_args_count_reg,
      Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ jmp(&formal_parameter_count_loaded, Label::kNear);

  __ bind(&no_arguments_adaptor);
  // No adaptor frame: the caller's argument count equals its formal
  // parameter count, loaded from the caller's SharedFunctionInfo.
  __ movp(scratch1, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movp(scratch1,
          FieldOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(
      caller_args_count_reg, scratch1,
      SharedFunctionInfo::kFormalParameterCountOffset);

  __ bind(&formal_parameter_count_loaded);

  // Delegate the actual frame dropping/argument shuffling to the
  // MacroAssembler helper of the same name.
  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3, ReturnAddressState::kOnStack);
  __ bind(&done);
}
}  // namespace
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002317
// static
// [[Call]] for ordinary JSFunctions: rejects class constructors, converts
// the receiver for non-native sloppy-mode functions, optionally prepares a
// tail call, and finally jumps into the function's code via
// InvokeFunctionCode (which handles arguments adaption).
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);
  __ AssertFunction(rdi);

  // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testb(FieldOperand(rdx, SharedFunctionInfo::kFunctionKindByteOffset),
           Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ j(not_zero, &class_constructor);

  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the shared function info.
  //  -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  // The equality of the two byte offsets lets a single testb below check the
  // native and strict-mode bits at once.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
           Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                     (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ j(not_zero, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- rax : the number of arguments (not including the receiver)
    //  -- rdx : the shared function info.
    //  -- rdi : the function to call (checked to be a JSFunction)
    //  -- rsi : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // The caller guarantees the receiver is null/undefined, so it can be
      // patched to the global proxy unconditionally.
      __ LoadGlobalProxy(rcx);
    } else {
      Label convert_to_object, convert_receiver;
      __ movp(rcx, args.GetReceiverOperand());
      __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
      __ j(above_equal, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy, Label::kNear);
        __ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object,
                         Label::kNear);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(rcx);
        }
        __ jmp(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        // The argument count is smi-tagged and pushed so it survives the
        // stub call below; rdi is preserved the same way.
        __ Integer32ToSmi(rax, rax);
        __ Push(rax);
        __ Push(rdi);
        __ movp(rax, rcx);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ movp(rcx, rax);
        __ Pop(rdi);
        __ Pop(rax);
        __ SmiToInteger32(rax, rax);
      }
      // Reload the shared function info; rdx may have been clobbered by the
      // ToObject call above.
      __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ movp(args.GetReceiverOperand(), rcx);
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the shared function info.
  //  -- rdi : the function to call (checked to be a JSFunction)
  //  -- rsi : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, rax, rbx, rcx, r8);
  }

  __ LoadSharedFunctionInfoSpecialField(
      rbx, rdx, SharedFunctionInfo::kFormalParameterCountOffset);
  ParameterCount actual(rax);
  ParameterCount expected(rbx);

  __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
2437
2438
namespace {

// Inserts a bound function's [[BoundArguments]] on the stack between the
// receiver and the explicitly passed arguments: reserves space, slides the
// existing arguments (and return address) down, then copies the bound
// arguments into the gap. On exit, rax has been increased by the number of
// bound arguments. rdx and rdi are preserved.
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : new.target (only in case of [[Construct]])
  //  -- rdi : target (checked to be a JSBoundFunction)
  // -----------------------------------

  // Load [[BoundArguments]] into rcx and length of that into rbx.
  Label no_bound_arguments;
  __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
  __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
  __ testl(rbx, rbx);
  __ j(zero, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- rax : the number of arguments (not including the receiver)
    //  -- rdx : new.target (only in case of [[Construct]])
    //  -- rdi : target (checked to be a JSBoundFunction)
    //  -- rcx : the [[BoundArguments]] (implemented as FixedArray)
    //  -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ leap(kScratchRegister, Operand(rbx, times_pointer_size, 0));
      __ subp(rsp, kScratchRegister);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
      __ j(greater, &done, Label::kNear);  // Signed comparison.
      // Restore the stack pointer.
      __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Adjust effective number of arguments to include return address.
    __ incl(rax);

    // Relocate arguments and return address down the stack.
    {
      Label loop;
      __ Set(rcx, 0);
      // rbx now points at the old location of the first stack slot.
      __ leap(rbx, Operand(rsp, rbx, times_pointer_size, 0));
      __ bind(&loop);
      __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
      __ movp(Operand(rsp, rcx, times_pointer_size, 0), kScratchRegister);
      __ incl(rcx);
      __ cmpl(rcx, rax);
      __ j(less, &loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      // Reload the bound arguments array and its length; rcx/rbx were
      // repurposed by the relocation loop above.
      __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
      __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
      __ bind(&loop);
      __ decl(rbx);
      // Note: lea does not modify the CPU flags, so the j(greater) below
      // still observes the flags set by the decl(rbx) above.
      __ movp(kScratchRegister, FieldOperand(rcx, rbx, times_pointer_size,
                                             FixedArray::kHeaderSize));
      __ movp(Operand(rsp, rax, times_pointer_size, 0), kScratchRegister);
      __ leal(rax, Operand(rax, 1));
      __ j(greater, &loop);
    }

    // Adjust effective number of arguments (rax contains the number of
    // arguments from the call plus return address plus the number of
    // [[BoundArguments]]), so we need to subtract one for the return address.
    __ decl(rax);
  }
  __ bind(&no_bound_arguments);
}

}  // namespace
2522
2523
// static
// [[Call]] for JSBoundFunctions: installs [[BoundThis]] as the receiver,
// splices in the [[BoundArguments]], then tail-jumps to the generic Call
// builtin with the [[BoundTargetFunction]] as the new target.
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(rdi);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, rax, rbx, rcx, r8);
  }

  // Patch the receiver to [[BoundThis]].
  StackArgumentsAccessor args(rsp, rax);
  __ movp(rbx, FieldOperand(rdi, JSBoundFunction::kBoundThisOffset));
  __ movp(args.GetReceiverOperand(), rbx);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  // The builtin's code object is loaded through an external reference and
  // entered past its header.
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Load(rcx,
          ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);
}
2552
2553
// static
// Generic [[Call]] dispatcher: routes any target object (rdi) to the
// appropriate specialized path — CallFunction for JSFunctions,
// CallBoundFunction for bound functions, the runtime for proxies, the
// call-as-function delegate for other callables — and throws for
// non-callable targets.
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the target to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);

  // Note: the non_smi label is bound below but not targeted by any jump in
  // this function.
  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(rdi, &non_callable);
  __ bind(&non_smi);
  // CmpObjectType leaves the target's map in rcx for the instance-type and
  // bitfield checks that follow.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(equal, masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
       RelocInfo::CODE_TARGET);
  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ j(equal, masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
       RelocInfo::CODE_TARGET);

  // Check if target has a [[Call]] internal method.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsCallable));
  __ j(zero, &non_callable);

  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ j(not_equal, &non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, rax, rbx, rcx, r8);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(rdi);
  __ PushReturnAddressFrom(kScratchRegister);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ addp(rax, Immediate(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ movp(args.GetReceiverOperand(), rdi);
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
2616
2617
// static
// [[Construct]] for ordinary JSFunctions: tail-calls the function-specific
// construct stub stored on the SharedFunctionInfo.
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (checked to be a constructor)
  //  -- rdi : the constructor to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(rdi);

  // Calling convention for function specific ConstructStubs require
  // rbx to contain either an AllocationSite or undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
  // Skip the Code object header to reach the first instruction.
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);
}
2638
2639
// static
// [[Construct]] for JSBoundFunctions: splices in the [[BoundArguments]],
// forwards new.target, and tail-jumps to the generic Construct builtin on
// the [[BoundTargetFunction]].
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (checked to be a constructor)
  //  -- rdi : the constructor to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(rdi);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ cmpp(rdi, rdx);
    __ j(not_equal, &done, Label::kNear);
    __ movp(rdx,
            FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Load(rcx, ExternalReference(Builtins::kConstruct, masm->isolate()));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);
}
2668
2669
// static
// [[Construct]] for JSProxy targets: pushes constructor and new.target
// under the return address and tail-calls the runtime implementation.
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the constructor to call (checked to be a JSProxy)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(rdi);
  __ Push(rdx);
  __ PushReturnAddressFrom(kScratchRegister);
  // Include the pushed new_target, constructor and the receiver.
  __ addp(rax, Immediate(3));
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
2689
2690
// static
// Generic [[Construct]] dispatcher: routes the target (rdi) to
// ConstructFunction, ConstructBoundFunction, ConstructProxy, or the
// call-as-constructor delegate, and reports non-constructables.
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(rdi, &non_constructor, Label::kNear);

  // Dispatch based on instance type.
  // CmpObjectType leaves the target's map in rcx for the checks below.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(equal, masm->isolate()->builtins()->ConstructFunction(),
       RelocInfo::CODE_TARGET);

  // Check if target has a [[Construct]] internal method.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsConstructor));
  __ j(zero, &non_constructor, Label::kNear);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ j(equal, masm->isolate()->builtins()->ConstructBoundFunction(),
       RelocInfo::CODE_TARGET);

  // Only dispatch to proxies after checking whether they are constructors.
  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ j(equal, masm->isolate()->builtins()->ConstructProxy(),
       RelocInfo::CODE_TARGET);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ movp(args.GetReceiverOperand(), rdi);
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
2742
2743
// Verifies that |receiver| is compatible with the signature of
// |function_template_info|: walks the receiver's (hidden) prototype chain
// and, for each constructor, the chain of parent function templates,
// looking for a template whose identity matches the signature. Falls
// through when the check passes; jumps to |receiver_check_failed|
// otherwise. All three scratch registers and kScratchRegister are
// clobbered, and |receiver| is advanced along the prototype chain.
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Register scratch0, Register scratch1,
                                    Register scratch2,
                                    Label* receiver_check_failed) {
  Register signature = scratch0;
  Register map = scratch1;
  Register constructor = scratch2;

  // If there is no signature, return the holder.
  __ movp(signature, FieldOperand(function_template_info,
                                  FunctionTemplateInfo::kSignatureOffset));
  __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
  Label receiver_check_passed;
  __ j(equal, &receiver_check_passed, Label::kNear);

  // Walk the prototype chain.
  __ movp(map, FieldOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, kScratchRegister);
  __ CmpInstanceType(kScratchRegister, JS_FUNCTION_TYPE);
  Label next_prototype;
  __ j(not_equal, &next_prototype, Label::kNear);

  // Get the constructor's signature.
  Register type = constructor;
  __ movp(type,
          FieldOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ movp(type, FieldOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ cmpp(signature, type);
  __ j(equal, &receiver_check_passed, Label::kNear);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype, Label::kNear);
  __ CmpObjectType(type, FUNCTION_TEMPLATE_INFO_TYPE, kScratchRegister);
  __ j(not_equal, &next_prototype, Label::kNear);

  // Otherwise load the parent function template and iterate.
  __ movp(type,
          FieldOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ jmp(&function_template_loop, Label::kNear);

  // Load the next prototype.
  __ bind(&next_prototype);
  // Only continue along hidden prototypes; a regular prototype boundary
  // terminates the walk with a failure.
  __ testq(FieldOperand(map, Map::kBitField3Offset),
           Immediate(Map::HasHiddenPrototype::kMask));
  __ j(zero, receiver_check_failed);
  __ movp(receiver, FieldOperand(map, Map::kPrototypeOffset));
  __ movp(map, FieldOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ jmp(&prototype_loop_start, Label::kNear);

  __ bind(&receiver_check_passed);
}
2808
2809
// Fast path for API calls: checks that the receiver is compatible with the
// callee's FunctionTemplateInfo signature and, on success, jumps straight
// into the fast handler code; on failure, drops the arguments and throws
// an Illegal Invocation exception.
void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments (not including the receiver)
  //  -- rdi                 : callee
  //  -- rsi                 : context
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[rax * 8]        : first argument
  //  -- rsp[(rax + 1) * 8]  : receiver
  // -----------------------------------

  StackArgumentsAccessor args(rsp, rax);

  // Load the FunctionTemplateInfo.
  __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rbx, FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ movp(rcx, args.GetReceiverOperand());
  CompatibleReceiverCheck(masm, rcx, rbx, rdx, r8, r9, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ movp(rdx, FieldOperand(rbx, FunctionTemplateInfo::kCallCodeOffset));
  __ movp(rdx, FieldOperand(rdx, CallHandlerInfo::kFastHandlerOffset));
  __ addp(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rdx);

  // Compatible receiver check failed: pop return address, arguments and
  // receiver and throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  __ PopReturnAddressTo(rbx);
  // rax * 8 bytes of arguments plus one slot for the receiver are dropped.
  __ leap(rax, Operand(rax, times_pointer_size, 1 * kPointerSize));
  __ addp(rsp, rax);
  __ PushReturnAddressFrom(rbx);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
  }
}
2852
2853
// On-stack replacement: asks the runtime for OSR-compiled code for the
// function in the current JavaScript frame and, if available, rewrites the
// return address so that "returning" resumes at the code's OSR entry point.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the unoptimized code.
  __ cmpp(rax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Load deoptimization data from the code object.
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset
  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}
2888
2889
Ben Murdochb0fe1622011-05-05 13:52:32 +01002890#undef __
2891
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002892} // namespace internal
2893} // namespace v8
Leon Clarkef7060e22010-06-03 12:02:55 +01002894
2895#endif // V8_TARGET_ARCH_X64