blob: 316378348c20fdb37668f50af1a482fd051f153e [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Steve Blocka7e24c12009-10-30 11:49:00 +00004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_X64
Leon Clarkef7060e22010-06-03 12:02:55 +01006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#include "src/code-factory.h"
8#include "src/codegen.h"
9#include "src/deoptimizer.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010#include "src/full-codegen/full-codegen.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000011
12namespace v8 {
13namespace internal {
14
Ben Murdochb0fe1622011-05-05 13:52:32 +010015
Steve Blocka7e24c12009-10-30 11:49:00 +000016#define __ ACCESS_MASM(masm)
17
Steve Blocka7e24c12009-10-30 11:49:00 +000018
Leon Clarkee46be812010-01-19 14:06:41 +000019void Builtins::Generate_Adaptor(MacroAssembler* masm,
20 CFunctionId id,
21 BuiltinExtraArguments extra_args) {
22 // ----------- S t a t e -------------
Ben Murdochb8a8cc12014-11-26 15:28:44 +000023 // -- rax : number of arguments excluding receiver
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000024 // -- rdi : target
25 // -- rdx : new.target
Ben Murdochb8a8cc12014-11-26 15:28:44 +000026 // -- rsp[0] : return address
27 // -- rsp[8] : last argument
Leon Clarkee46be812010-01-19 14:06:41 +000028 // -- ...
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000029 // -- rsp[8 * argc] : first argument
Ben Murdochb8a8cc12014-11-26 15:28:44 +000030 // -- rsp[8 * (argc + 1)] : receiver
Leon Clarkee46be812010-01-19 14:06:41 +000031 // -----------------------------------
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000032 __ AssertFunction(rdi);
33
34 // Make sure we operate in the context of the called function (for example
35 // ConstructStubs implemented in C++ will be run in the context of the caller
36 // instead of the callee, due to the way that [[Construct]] is defined for
37 // ordinary functions).
38 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
Leon Clarkee46be812010-01-19 14:06:41 +000039
40 // Insert extra arguments.
41 int num_extra_args = 0;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000042 if (extra_args != BuiltinExtraArguments::kNone) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000043 __ PopReturnAddressTo(kScratchRegister);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000044 if (extra_args & BuiltinExtraArguments::kTarget) {
45 ++num_extra_args;
46 __ Push(rdi);
47 }
48 if (extra_args & BuiltinExtraArguments::kNewTarget) {
49 ++num_extra_args;
50 __ Push(rdx);
51 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +000052 __ PushReturnAddressFrom(kScratchRegister);
Leon Clarkee46be812010-01-19 14:06:41 +000053 }
54
Steve Block6ded16b2010-05-10 14:33:55 +010055 // JumpToExternalReference expects rax to contain the number of arguments
Leon Clarkee46be812010-01-19 14:06:41 +000056 // including the receiver and the extra arguments.
Ben Murdochb8a8cc12014-11-26 15:28:44 +000057 __ addp(rax, Immediate(num_extra_args + 1));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000058
59 __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
Steve Blocka7e24c12009-10-30 11:49:00 +000060}
61
62
Ben Murdochb8a8cc12014-11-26 15:28:44 +000063static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
64 __ movp(kScratchRegister,
65 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
66 __ movp(kScratchRegister,
67 FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
68 __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
69 __ jmp(kScratchRegister);
70}
71
Ben Murdoch097c5b22016-05-18 11:27:45 +010072static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
73 Runtime::FunctionId function_id) {
74 // ----------- S t a t e -------------
75 // -- rax : argument count (preserved for callee)
76 // -- rdx : new target (preserved for callee)
77 // -- rdi : target function (preserved for callee)
78 // -----------------------------------
79 {
80 FrameScope scope(masm, StackFrame::INTERNAL);
81 // Push the number of arguments to the callee.
82 __ Integer32ToSmi(rax, rax);
83 __ Push(rax);
84 // Push a copy of the target function and the new target.
85 __ Push(rdi);
86 __ Push(rdx);
87 // Function is also the parameter to the runtime call.
88 __ Push(rdi);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000089
Ben Murdoch097c5b22016-05-18 11:27:45 +010090 __ CallRuntime(function_id, 1);
91 __ movp(rbx, rax);
92
93 // Restore target function and new target.
94 __ Pop(rdx);
95 __ Pop(rdi);
96 __ Pop(rax);
97 __ SmiToInteger32(rax, rax);
98 }
99 __ leap(rbx, FieldOperand(rbx, Code::kHeaderSize));
100 __ jmp(rbx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000101}
102
103
104void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
105 // Checking whether the queued function is ready for install is optional,
106 // since we come across interrupts and stack checks elsewhere. However,
107 // not checking may delay installing ready functions, and always checking
108 // would be quite expensive. A good compromise is to first check against
109 // stack limit as a cue for an interrupt signal.
110 Label ok;
111 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
112 __ j(above_equal, &ok);
113
Ben Murdoch097c5b22016-05-18 11:27:45 +0100114 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000115
116 __ bind(&ok);
117 GenerateTailCallToSharedCode(masm);
118}
119
120
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100121static void Generate_JSConstructStubHelper(MacroAssembler* masm,
122 bool is_api_function,
Ben Murdoch097c5b22016-05-18 11:27:45 +0100123 bool create_implicit_receiver,
124 bool check_derived_construct) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000125 // ----------- S t a t e -------------
126 // -- rax: number of arguments
Ben Murdochda12d292016-06-02 14:46:10 +0100127 // -- rsi: context
Steve Blocka7e24c12009-10-30 11:49:00 +0000128 // -- rdi: constructor function
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000129 // -- rbx: allocation site or undefined
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000130 // -- rdx: new target
Steve Blocka7e24c12009-10-30 11:49:00 +0000131 // -----------------------------------
132
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100133 // Enter a construct frame.
134 {
135 FrameScope scope(masm, StackFrame::CONSTRUCT);
Steve Blocka7e24c12009-10-30 11:49:00 +0000136
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000137 // Preserve the incoming parameters on the stack.
138 __ AssertUndefinedOrAllocationSite(rbx);
Ben Murdochda12d292016-06-02 14:46:10 +0100139 __ Push(rsi);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000140 __ Push(rbx);
141 __ Integer32ToSmi(rcx, rax);
142 __ Push(rcx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000143
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000144 if (create_implicit_receiver) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100145 // Allocate the new receiver object.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000146 __ Push(rdi);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000147 __ Push(rdx);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100148 FastNewObjectStub stub(masm->isolate());
149 __ CallStub(&stub);
150 __ movp(rbx, rax);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000151 __ Pop(rdx);
152 __ Pop(rdi);
153
Ben Murdoch097c5b22016-05-18 11:27:45 +0100154 // ----------- S t a t e -------------
155 // -- rdi: constructor function
156 // -- rbx: newly allocated object
157 // -- rdx: new target
158 // -----------------------------------
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000159
160 // Retrieve smi-tagged arguments count from the stack.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100161 __ SmiToInteger32(rax, Operand(rsp, 0 * kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000162 }
163
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000164 if (create_implicit_receiver) {
165 // Push the allocated receiver to the stack. We need two copies
166 // because we may have to return the original one and the calling
167 // conventions dictate that the called function pops the receiver.
168 __ Push(rbx);
169 __ Push(rbx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000170 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000171 __ PushRoot(Heap::kTheHoleValueRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000172 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100173
174 // Set up pointer to last argument.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000175 __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100176
177 // Copy arguments and receiver to the expression stack.
178 Label loop, entry;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000179 __ movp(rcx, rax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100180 __ jmp(&entry);
181 __ bind(&loop);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000182 __ Push(Operand(rbx, rcx, times_pointer_size, 0));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100183 __ bind(&entry);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000184 __ decp(rcx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100185 __ j(greater_equal, &loop);
186
187 // Call the function.
188 if (is_api_function) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000189 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100190 Handle<Code> code =
191 masm->isolate()->builtins()->HandleApiCallConstruct();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000192 __ Call(code, RelocInfo::CODE_TARGET);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100193 } else {
194 ParameterCount actual(rax);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000195 __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION,
196 CheckDebugStepCallWrapper());
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100197 }
198
199 // Store offset of return address for deoptimizer.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000200 if (create_implicit_receiver && !is_api_function) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100201 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
202 }
203
204 // Restore context from the frame.
Ben Murdochda12d292016-06-02 14:46:10 +0100205 __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100206
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000207 if (create_implicit_receiver) {
208 // If the result is an object (in the ECMA sense), we should get rid
209 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
210 // on page 74.
211 Label use_receiver, exit;
212 // If the result is a smi, it is *not* an object in the ECMA sense.
213 __ JumpIfSmi(rax, &use_receiver);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100214
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000215 // If the type of the result (stored in its map) is less than
216 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
217 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
218 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
219 __ j(above_equal, &exit);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100220
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000221 // Throw away the result of the constructor invocation and use the
222 // on-stack receiver as the result.
223 __ bind(&use_receiver);
224 __ movp(rax, Operand(rsp, 0));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100225
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000226 // Restore the arguments count and leave the construct frame. The
227 // arguments count is stored below the receiver.
228 __ bind(&exit);
229 __ movp(rbx, Operand(rsp, 1 * kPointerSize));
230 } else {
231 __ movp(rbx, Operand(rsp, 0));
232 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100233
234 // Leave construct frame.
Steve Blocka7e24c12009-10-30 11:49:00 +0000235 }
236
Ben Murdoch097c5b22016-05-18 11:27:45 +0100237 // ES6 9.2.2. Step 13+
238 // Check that the result is not a Smi, indicating that the constructor result
239 // from a derived class is neither undefined nor an Object.
240 if (check_derived_construct) {
241 Label dont_throw;
242 __ JumpIfNotSmi(rax, &dont_throw);
243 {
244 FrameScope scope(masm, StackFrame::INTERNAL);
245 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
246 }
247 __ bind(&dont_throw);
248 }
249
Steve Blocka7e24c12009-10-30 11:49:00 +0000250 // Remove caller arguments from the stack and return.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000251 __ PopReturnAddressTo(rcx);
Steve Block3ce2e202009-11-05 08:53:23 +0000252 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000253 __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
254 __ PushReturnAddressFrom(rcx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000255 if (create_implicit_receiver) {
256 Counters* counters = masm->isolate()->counters();
257 __ IncrementCounter(counters->constructed_objects(), 1);
258 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000259 __ ret(0);
260}
261
262
Leon Clarkee46be812010-01-19 14:06:41 +0000263void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100264 Generate_JSConstructStubHelper(masm, false, true, false);
Leon Clarkee46be812010-01-19 14:06:41 +0000265}
266
267
268void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100269 Generate_JSConstructStubHelper(masm, true, false, false);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000270}
271
272
273void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100274 Generate_JSConstructStubHelper(masm, false, false, false);
275}
276
277
278void Builtins::Generate_JSBuiltinsConstructStubForDerived(
279 MacroAssembler* masm) {
280 Generate_JSConstructStubHelper(masm, false, false, true);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000281}
282
283
284void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
285 FrameScope scope(masm, StackFrame::INTERNAL);
286 __ Push(rdi);
287 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
288}
289
290
291enum IsTagged { kRaxIsSmiTagged, kRaxIsUntaggedInt };
292
293
294// Clobbers rcx, r11, kScratchRegister; preserves all other registers.
295static void Generate_CheckStackOverflow(MacroAssembler* masm,
296 IsTagged rax_is_tagged) {
297 // rax : the number of items to be pushed to the stack
298 //
299 // Check the stack for overflow. We are not trying to catch
300 // interruptions (e.g. debug break and preemption) here, so the "real stack
301 // limit" is checked.
302 Label okay;
303 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
304 __ movp(rcx, rsp);
305 // Make rcx the space we have left. The stack might already be overflowed
306 // here which will cause rcx to become negative.
307 __ subp(rcx, kScratchRegister);
308 // Make r11 the space we need for the array when it is unrolled onto the
309 // stack.
310 if (rax_is_tagged == kRaxIsSmiTagged) {
311 __ PositiveSmiTimesPowerOfTwoToInteger64(r11, rax, kPointerSizeLog2);
312 } else {
313 DCHECK(rax_is_tagged == kRaxIsUntaggedInt);
314 __ movp(r11, rax);
315 __ shlq(r11, Immediate(kPointerSizeLog2));
316 }
317 // Check if the arguments will overflow the stack.
318 __ cmpp(rcx, r11);
319 __ j(greater, &okay); // Signed comparison.
320
321 // Out of stack space.
322 __ CallRuntime(Runtime::kThrowStackOverflow);
323
324 __ bind(&okay);
Leon Clarkee46be812010-01-19 14:06:41 +0000325}
326
327
Steve Blocka7e24c12009-10-30 11:49:00 +0000328static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
329 bool is_construct) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000330 ProfileEntryHookStub::MaybeCallEntryHook(masm);
331
Steve Blocka7e24c12009-10-30 11:49:00 +0000332 // Expects five C++ function parameters.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000333 // - Object* new_target
334 // - JSFunction* function
Steve Blocka7e24c12009-10-30 11:49:00 +0000335 // - Object* receiver
336 // - int argc
337 // - Object*** argv
338 // (see Handle::Invoke in execution.cc).
339
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100340 // Open a C++ scope for the FrameScope.
341 {
342 // Platform specific argument handling. After this, the stack contains
343 // an internal frame and the pushed function and receiver, and
344 // register rax and rbx holds the argument count and argument array,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000345 // while rdi holds the function pointer, rsi the context, and rdx the
346 // new.target.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100347
Steve Blocka7e24c12009-10-30 11:49:00 +0000348#ifdef _WIN64
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100349 // MSVC parameters in:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000350 // rcx : new_target
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000351 // rdx : function
352 // r8 : receiver
353 // r9 : argc
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100354 // [rsp+0x20] : argv
Steve Blocka7e24c12009-10-30 11:49:00 +0000355
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100356 // Enter an internal frame.
357 FrameScope scope(masm, StackFrame::INTERNAL);
Steve Blocka7e24c12009-10-30 11:49:00 +0000358
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000359 // Setup the context (we need to use the caller context from the isolate).
360 ExternalReference context_address(Isolate::kContextAddress,
361 masm->isolate());
362 __ movp(rsi, masm->ExternalOperand(context_address));
Steve Blocka7e24c12009-10-30 11:49:00 +0000363
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100364 // Push the function and the receiver onto the stack.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000365 __ Push(rdx);
366 __ Push(r8);
Steve Blocka7e24c12009-10-30 11:49:00 +0000367
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100368 // Load the number of arguments and setup pointer to the arguments.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000369 __ movp(rax, r9);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100370 // Load the previous frame pointer to access C argument on stack
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000371 __ movp(kScratchRegister, Operand(rbp, 0));
372 __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100373 // Load the function pointer into rdi.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000374 __ movp(rdi, rdx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000375 // Load the new.target into rdx.
376 __ movp(rdx, rcx);
Steve Block6ded16b2010-05-10 14:33:55 +0100377#else // _WIN64
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100378 // GCC parameters in:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000379 // rdi : new_target
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100380 // rsi : function
381 // rdx : receiver
382 // rcx : argc
383 // r8 : argv
Steve Blocka7e24c12009-10-30 11:49:00 +0000384
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000385 __ movp(r11, rdi);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000386 __ movp(rdi, rsi);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100387 // rdi : function
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000388 // r11 : new_target
Steve Blocka7e24c12009-10-30 11:49:00 +0000389
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100390 // Clear the context before we push it when entering the internal frame.
391 __ Set(rsi, 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000392
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100393 // Enter an internal frame.
394 FrameScope scope(masm, StackFrame::INTERNAL);
Steve Blocka7e24c12009-10-30 11:49:00 +0000395
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000396 // Setup the context (we need to use the caller context from the isolate).
397 ExternalReference context_address(Isolate::kContextAddress,
398 masm->isolate());
399 __ movp(rsi, masm->ExternalOperand(context_address));
400
401 // Push the function and receiver onto the stack.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000402 __ Push(rdi);
403 __ Push(rdx);
Steve Blocka7e24c12009-10-30 11:49:00 +0000404
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100405 // Load the number of arguments and setup pointer to the arguments.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000406 __ movp(rax, rcx);
407 __ movp(rbx, r8);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000408
409 // Load the new.target into rdx.
410 __ movp(rdx, r11);
Steve Blocka7e24c12009-10-30 11:49:00 +0000411#endif // _WIN64
412
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100413 // Current stack contents:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000414 // [rsp + 2 * kPointerSize ... ] : Internal frame
415 // [rsp + kPointerSize] : function
416 // [rsp] : receiver
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100417 // Current register contents:
418 // rax : argc
419 // rbx : argv
420 // rsi : context
421 // rdi : function
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000422 // rdx : new.target
423
424 // Check if we have enough stack space to push all arguments.
425 // Expects argument count in rax. Clobbers rcx, r11.
426 Generate_CheckStackOverflow(masm, kRaxIsUntaggedInt);
Steve Blocka7e24c12009-10-30 11:49:00 +0000427
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100428 // Copy arguments to the stack in a loop.
429 // Register rbx points to array of pointers to handle locations.
430 // Push the values of these handles.
431 Label loop, entry;
432 __ Set(rcx, 0); // Set loop variable to 0.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000433 __ jmp(&entry, Label::kNear);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100434 __ bind(&loop);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000435 __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
436 __ Push(Operand(kScratchRegister, 0)); // dereference handle
437 __ addp(rcx, Immediate(1));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100438 __ bind(&entry);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000439 __ cmpp(rcx, rax);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100440 __ j(not_equal, &loop);
Steve Blocka7e24c12009-10-30 11:49:00 +0000441
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000442 // Invoke the builtin code.
443 Handle<Code> builtin = is_construct
444 ? masm->isolate()->builtins()->Construct()
445 : masm->isolate()->builtins()->Call();
446 __ Call(builtin, RelocInfo::CODE_TARGET);
447
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100448 // Exit the internal frame. Notice that this also removes the empty
449 // context and the function left on the stack by the code
450 // invocation.
Steve Blocka7e24c12009-10-30 11:49:00 +0000451 }
452
Steve Blocka7e24c12009-10-30 11:49:00 +0000453 // TODO(X64): Is argument correct? Is there a receiver to remove?
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100454 __ ret(1 * kPointerSize); // Remove receiver.
Steve Blocka7e24c12009-10-30 11:49:00 +0000455}
456
457
458void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
459 Generate_JSEntryTrampolineHelper(masm, false);
460}
461
462
463void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
464 Generate_JSEntryTrampolineHelper(masm, true);
465}
466
Iain Merrick75681382010-08-19 15:07:18 +0100467
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000468// Generate code for entering a JS function with the interpreter.
469// On entry to the function the receiver and arguments have been pushed on the
470// stack left to right. The actual argument count matches the formal parameter
471// count expected by the function.
472//
473// The live registers are:
474// o rdi: the JS function object being called
475// o rdx: the new target
476// o rsi: our context
477// o rbp: the caller's frame pointer
478// o rsp: stack pointer (pointing to return address)
479//
Ben Murdoch097c5b22016-05-18 11:27:45 +0100480// The function builds an interpreter frame. See InterpreterFrameConstants in
481// frames.h for its layout.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000482void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
483 // Open a frame scope to indicate that there is a frame on the stack. The
484 // MANUAL indicates that the scope shouldn't actually generate code to set up
485 // the frame (that is done below).
486 FrameScope frame_scope(masm, StackFrame::MANUAL);
487 __ pushq(rbp); // Caller's frame pointer.
488 __ movp(rbp, rsp);
489 __ Push(rsi); // Callee's context.
490 __ Push(rdi); // Callee's JS function.
491 __ Push(rdx); // Callee's new target.
492
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000493 // Get the bytecode array from the function object and load the pointer to the
494 // first entry into edi (InterpreterBytecodeRegister).
495 __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100496
497 Label load_debug_bytecode_array, bytecode_array_loaded;
498 DCHECK_EQ(Smi::FromInt(0), DebugInfo::uninitialized());
499 __ cmpp(FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset),
500 Immediate(0));
501 __ j(not_equal, &load_debug_bytecode_array);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000502 __ movp(kInterpreterBytecodeArrayRegister,
503 FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100504 __ bind(&bytecode_array_loaded);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000505
506 if (FLAG_debug_code) {
507 // Check function data field is actually a BytecodeArray object.
508 __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
509 __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
510 rax);
511 __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
512 }
513
Ben Murdoch097c5b22016-05-18 11:27:45 +0100514 // Push bytecode array.
515 __ Push(kInterpreterBytecodeArrayRegister);
516 // Push zero for bytecode array offset.
517 __ Push(Immediate(0));
518
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000519 // Allocate the local and temporary register file on the stack.
520 {
521 // Load frame size from the BytecodeArray object.
522 __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
523 BytecodeArray::kFrameSizeOffset));
524
525 // Do a stack check to ensure we don't go over the limit.
526 Label ok;
527 __ movp(rdx, rsp);
528 __ subp(rdx, rcx);
529 __ CompareRoot(rdx, Heap::kRealStackLimitRootIndex);
530 __ j(above_equal, &ok, Label::kNear);
531 __ CallRuntime(Runtime::kThrowStackOverflow);
532 __ bind(&ok);
533
534 // If ok, push undefined as the initial value for all register file entries.
535 Label loop_header;
536 Label loop_check;
537 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
538 __ j(always, &loop_check);
539 __ bind(&loop_header);
540 // TODO(rmcilroy): Consider doing more than one push per loop iteration.
541 __ Push(rdx);
542 // Continue loop if not done.
543 __ bind(&loop_check);
544 __ subp(rcx, Immediate(kPointerSize));
545 __ j(greater_equal, &loop_header, Label::kNear);
546 }
547
548 // TODO(rmcilroy): List of things not currently dealt with here but done in
549 // fullcodegen's prologue:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000550 // - Call ProfileEntryHookStub when isolate has a function_entry_hook.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000551 // - Code aging of the BytecodeArray object.
552
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000553 // Load accumulator, register file, bytecode offset, dispatch table into
554 // registers.
555 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
556 __ movp(kInterpreterRegisterFileRegister, rbp);
557 __ addp(kInterpreterRegisterFileRegister,
558 Immediate(InterpreterFrameConstants::kRegisterFilePointerFromFp));
559 __ movp(kInterpreterBytecodeOffsetRegister,
560 Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100561 __ Move(
562 kInterpreterDispatchTableRegister,
563 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000564
565 // Dispatch to the first bytecode handler for the function.
566 __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
567 kInterpreterBytecodeOffsetRegister, times_1, 0));
568 __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
569 times_pointer_size, 0));
570 // TODO(rmcilroy): Make dispatch table point to code entrys to avoid untagging
571 // and header removal.
572 __ addp(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag));
573 __ call(rbx);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100574
575 // Even though the first bytecode handler was called, we will never return.
576 __ Abort(kUnexpectedReturnFromBytecodeHandler);
577
578 // Load debug copy of the bytecode array.
579 __ bind(&load_debug_bytecode_array);
580 Register debug_info = kInterpreterBytecodeArrayRegister;
581 __ movp(debug_info, FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset));
582 __ movp(kInterpreterBytecodeArrayRegister,
583 FieldOperand(debug_info, DebugInfo::kAbstractCodeIndex));
584 __ jmp(&bytecode_array_loaded);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000585}
586
587
588void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
589 // TODO(rmcilroy): List of things not currently dealt with here but done in
590 // fullcodegen's EmitReturnSequence.
591 // - Supporting FLAG_trace for Runtime::TraceExit.
592 // - Support profiler (specifically decrementing profiling_counter
593 // appropriately and calling out to HandleInterrupts if necessary).
594
595 // The return value is in accumulator, which is already in rax.
596
597 // Leave the frame (also dropping the register file).
598 __ leave();
599
600 // Drop receiver + arguments and return.
601 __ movl(rbx, FieldOperand(kInterpreterBytecodeArrayRegister,
602 BytecodeArray::kParameterSizeOffset));
603 __ PopReturnAddressTo(rcx);
604 __ addp(rsp, rbx);
605 __ PushReturnAddressFrom(rcx);
606 __ ret(0);
607}
608
609
610static void Generate_InterpreterPushArgs(MacroAssembler* masm,
611 bool push_receiver) {
612 // ----------- S t a t e -------------
613 // -- rax : the number of arguments (not including the receiver)
614 // -- rbx : the address of the first argument to be pushed. Subsequent
615 // arguments should be consecutive above this, in the same order as
616 // they are to be pushed onto the stack.
617 // -----------------------------------
618
619 // Find the address of the last argument.
620 __ movp(rcx, rax);
621 if (push_receiver) {
622 __ addp(rcx, Immediate(1)); // Add one for receiver.
623 }
624
625 __ shlp(rcx, Immediate(kPointerSizeLog2));
626 __ negp(rcx);
627 __ addp(rcx, rbx);
628
629 // Push the arguments.
630 Label loop_header, loop_check;
631 __ j(always, &loop_check);
632 __ bind(&loop_header);
633 __ Push(Operand(rbx, 0));
634 __ subp(rbx, Immediate(kPointerSize));
635 __ bind(&loop_check);
636 __ cmpp(rbx, rcx);
637 __ j(greater, &loop_header, Label::kNear);
638}
639
640
641// static
Ben Murdoch097c5b22016-05-18 11:27:45 +0100642void Builtins::Generate_InterpreterPushArgsAndCallImpl(
643 MacroAssembler* masm, TailCallMode tail_call_mode) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000644 // ----------- S t a t e -------------
645 // -- rax : the number of arguments (not including the receiver)
646 // -- rbx : the address of the first argument to be pushed. Subsequent
647 // arguments should be consecutive above this, in the same order as
648 // they are to be pushed onto the stack.
649 // -- rdi : the target to call (can be any Object).
650 // -----------------------------------
651
652 // Pop return address to allow tail-call after pushing arguments.
653 __ PopReturnAddressTo(kScratchRegister);
654
655 Generate_InterpreterPushArgs(masm, true);
656
657 // Call the target.
658 __ PushReturnAddressFrom(kScratchRegister); // Re-push return address.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100659 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
660 tail_call_mode),
661 RelocInfo::CODE_TARGET);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000662}
663
664
// static
// Pushes the interpreter's argument run plus an empty receiver slot, then
// tail-calls the generic Construct builtin. Used by the bytecode interpreter
// to perform [[Construct]] calls.
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push slot for the receiver to be constructed.
  __ Push(Immediate(0));

  // false: the receiver slot was already pushed above; push arguments only.
  Generate_InterpreterPushArgs(masm, false);

  // Push return address in preparation for the tail-call.
  __ PushReturnAddressFrom(kScratchRegister);

  // Call the constructor (rax, rdx, rdi passed on).
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
691
692
// Re-enters the interpreter from an existing interpreter frame: reloads the
// interpreter's dedicated registers (register file, dispatch table, context,
// bytecode array, bytecode offset) from the frame and jumps to the handler
// for the bytecode at the current offset.
static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) {
  // Initialize register file register and dispatch table register.
  __ movp(kInterpreterRegisterFileRegister, rbp);
  __ addp(kInterpreterRegisterFileRegister,
          Immediate(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the context from the frame.
  __ movp(kContextRegister,
          Operand(kInterpreterRegisterFileRegister,
                  InterpreterFrameConstants::kContextFromRegisterPointer));

  // Get the bytecode array pointer from the frame.
  __ movp(
      kInterpreterBytecodeArrayRegister,
      Operand(kInterpreterRegisterFileRegister,
              InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rbx);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame (stored as a Smi).
  __ movp(
      kInterpreterBytecodeOffsetRegister,
      Operand(kInterpreterRegisterFileRegister,
              InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
  __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
                    kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode: load the byte at the current offset,
  // index the dispatch table with it, skip the Code object header, and jump.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  __ addp(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rbx);
}
737
738
// Notifies the runtime of a deoptimization with the given bailout type, then
// resumes execution in the interpreter at the deoptimized bytecode via
// Generate_EnterBytecodeDispatch.
static void Generate_InterpreterNotifyDeoptimizedHelper(
    MacroAssembler* masm, Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // Drop state (we don't use these for interpreter deopts) and pop the
  // accumulator value into the accumulator register and push PC at top
  // of stack (to simulate initial call to bytecode handler in interpreter entry
  // trampoline).
  __ Pop(rbx);                              // Saved PC.
  __ Drop(1);                               // Drop state.
  __ Pop(kInterpreterAccumulatorRegister);  // Restore accumulator value.
  __ Push(rbx);                             // Re-push PC.

  // Enter the bytecode dispatch.
  Generate_EnterBytecodeDispatch(masm);
}
763
764
// Interpreter deopt notification for an eager bailout.
void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
768
769
// Interpreter deopt notification for a soft bailout.
void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
773
774
// Interpreter deopt notification for a lazy bailout.
void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
778
// Entry point that (re-)enters bytecode dispatch on an existing interpreter
// frame, e.g. after on-stack replacement back into the interpreter.
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the address of the interpreter entry trampoline as a return address.
  // This simulates the initial call to bytecode handlers in interpreter entry
  // trampoline. The return will never actually be taken, but our stack walker
  // uses this address to determine whether a frame is interpreted.
  __ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline());

  Generate_EnterBytecodeDispatch(masm);
}
788
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000789
// Lazily compiles the function and tail-calls into the code object returned
// by the runtime (via GenerateTailCallToReturnedCode).
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
793
Ben Murdochb0fe1622011-05-05 13:52:32 +0100794
// Compiles the function with the optimizing compiler (blocking this thread)
// and tail-calls into the returned optimized code.
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}
799
800
// Kicks off concurrent optimized compilation and tail-calls into the code
// object returned by the runtime.
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
804
805
// Shared implementation of the Make*CodeYoungAgain builtins: calls out to the
// C++ make_code_young function and then re-executes the (now young) code.
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns.
  // NOTE(review): the 5 appears to be the short-call instruction length used
  // to invoke this stub (cf. Assembler::kShortCallInstructionLength in
  // Generate_MarkCodeAsExecutedOnce) -- confirm before changing.
  __ subp(Operand(rsp, 0), Immediate(5));
  __ Pushad();
  // arg_reg_1 = return address of the patched code, arg_reg_2 = isolate.
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ Popad();
  __ ret(0);
}
828
829
// Defines Generate_Make<age>CodeYoungAgain{Even,Odd}Marking builtins for each
// code age in CODE_AGE_LIST; all of them delegate to the common helper above.
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
    MacroAssembler* masm) { \
  GenerateMakeCodeYoungAgainCommon(masm); \
} \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
    MacroAssembler* masm) { \
  GenerateMakeCodeYoungAgainCommon(masm); \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
841
842
// Marks the calling code object as executed via a C++ callout, then manually
// performs the prologue that the (replaced) code-age stub would have done and
// returns to the instruction after the code-age stub call.
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.
  __ Pushad();
  // arg_reg_1 = start of the short call that invoked this stub,
  // arg_reg_2 = isolate.
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ Popad();

  // Perform prologue operations usually performed by the young code stub.
  __ PopReturnAddressTo(kScratchRegister);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS Function.
  __ PushReturnAddressFrom(kScratchRegister);

  // Jump to point after the code-age stub.
  __ ret(0);
}
872
873
// "Executed twice" marking reuses the plain make-code-young path.
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
877
878
// "To be executed once" marking is handled identically to "executed once".
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}
882
883
// Notifies the runtime that a stub deoptimized, preserving all registers
// around the call, then returns to the IC miss stub whose continuation is
// still on the stack.
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ Pushad();
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ Popad();
    // Tear down internal frame.
  }

  __ DropUnderReturnAddress(1);  // Ignore state offset
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}
902
903
// Stub-failure notification without saving FP registers.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
907
908
// Stub-failure notification that also saves/restores FP registers.
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
912
913
// Notifies the runtime of a (full-codegen) deoptimization, then resumes the
// continuation left on the stack, removing the state word and -- if the
// deopt point expected a value in rax -- restoring that value.
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));

    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));

  // Switch on the state.
  Label not_no_registers, not_tos_rax;
  __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  // TOS_REG state: the value on top of the stack belongs in rax.
  __ bind(&not_no_registers);
  __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
  __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
  __ j(not_equal, &not_tos_rax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  // Any other state value is a bug.
  __ bind(&not_tos_rax);
  __ Abort(kNoCasesLeft);
}
945
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000946
// Deopt notification for an eager bailout.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
950
951
// Deopt notification for a soft bailout.
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
955
956
// Deopt notification for a lazy bailout.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
960
961
// static
// Implements the Date.prototype getters (getFullYear, getMonth, ...): loads
// the requested field from the JSDate receiver, using the date cache when its
// stamp is current and falling back to a C++ call otherwise. Throws a
// TypeError if the receiver is not a JSDate.
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into rax and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(rax, args.GetReceiverOperand());
    __ JumpIfSmi(rax, &receiver_not_date);
    __ CmpObjectType(rax, JS_DATE_TYPE, rbx);
    __ j(not_equal, &receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    // The raw time value is always stored directly on the JSDate.
    __ movp(rax, FieldOperand(rax, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      // Fast path: the cached field is valid only while the receiver's cache
      // stamp matches the isolate-wide date cache stamp.
      Label stamp_mismatch;
      __ Load(rdx, ExternalReference::date_cache_stamp(masm->isolate()));
      __ cmpp(rdx, FieldOperand(rax, JSDate::kCacheStampOffset));
      __ j(not_equal, &stamp_mismatch, Label::kNear);
      __ movp(rax, FieldOperand(
                       rax, JSDate::kValueOffset + field_index * kPointerSize));
      __ ret(1 * kPointerSize);
      __ bind(&stamp_mismatch);
    }
    // Slow path: compute the field in C++ (date, Smi field index).
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2);
    __ Move(arg_reg_1, rax);
    __ Move(arg_reg_2, Smi::FromInt(field_index));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ ret(1 * kPointerSize);

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ EnterFrame(StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowNotDateError);
  }
}
1011
// static
// Implements Function.prototype[Symbol.hasInstance] by delegating the
// instanceof check to InstanceOfStub.
void Builtins::Generate_FunctionHasInstance(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : first argument (left-hand side)
  //  -- rsp[16] : receiver (right-hand side)
  // -----------------------------------

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Operands are addressed relative to rbp of the internal frame just built.
    __ movp(InstanceOfDescriptor::LeftRegister(),
            Operand(rbp, 2 * kPointerSize));  // Load left-hand side.
    __ movp(InstanceOfDescriptor::RightRegister(),
            Operand(rbp, 3 * kPointerSize));  // Load right-hand side.
    // NOTE(review): the `true` stub argument presumably selects the
    // Symbol.hasInstance variant of InstanceOfStub -- confirm in its ctor.
    InstanceOfStub stub(masm->isolate(), true);
    __ CallStub(&stub);
  }

  // Pop the argument and the receiver.
  __ ret(2 * kPointerSize);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001034
// static
// Implements Function.prototype.apply: normalizes the stack so the receiver
// becomes the call target and thisArg the new receiver, then tail-calls the
// Apply builtin with argArray (or Call with zero args when argArray is
// null/undefined). Throws if the receiver is not callable.
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : argArray
  //  -- rsp[16] : thisArg
  //  -- rsp[24] : receiver
  // -----------------------------------

  // 1. Load receiver into rdi, argArray into rax (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg_array, no_this_arg;
    StackArgumentsAccessor args(rsp, rax);
    // Missing thisArg/argArray default to undefined.
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ movp(rbx, rdx);
    __ movp(rdi, args.GetReceiverOperand());
    __ testp(rax, rax);
    __ j(zero, &no_this_arg, Label::kNear);
    {
      __ movp(rdx, args.GetArgumentOperand(1));  // thisArg
      __ cmpp(rax, Immediate(1));
      __ j(equal, &no_arg_array, Label::kNear);
      __ movp(rbx, args.GetArgumentOperand(2));  // argArray
      __ bind(&no_arg_array);
    }
    __ bind(&no_this_arg);
    // Pop all arguments and the receiver, then push thisArg as new receiver.
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // ----------- S t a t e -------------
  //  -- rax    : argArray
  //  -- rdi    : receiver
  //  -- rsp[0] : return address
  //  -- rsp[8] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(rdi, &receiver_not_callable, Label::kNear);
  __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsCallable));
  __ j(zero, &receiver_not_callable, Label::kNear);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(rax, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
  __ JumpIfRoot(rax, Heap::kUndefinedValueRootIndex, &no_arguments,
                Label::kNear);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver. Since we did not create a frame for
  // Function.prototype.apply() yet, we use a normal Call builtin here.
  __ bind(&no_arguments);
  {
    __ Set(rax, 0);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(args.GetReceiverOperand(), rdi);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1114
1115
// static
// Implements Function.prototype.call: shifts all arguments down one slot so
// the original first argument becomes the receiver, then tail-calls the
// generic Call builtin on the original receiver (the callable).
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // Stack Layout:
  //   rsp[0]           : Return address
  //   rsp[8]           : Argument n
  //   rsp[16]          : Argument n-1
  //   ...
  //   rsp[8 * n]       : Argument 1
  //   rsp[8 * (n + 1)] : Receiver (callable to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument (push undefined if not).
  {
    Label done;
    __ testp(rax, rax);
    __ j(not_zero, &done, Label::kNear);
    __ PopReturnAddressTo(rbx);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(rbx);
    __ incp(rax);
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  {
    StackArgumentsAccessor args(rsp, rax);
    __ movp(rdi, args.GetReceiverOperand());
  }

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  {
    Label loop;
    __ movp(rcx, rax);
    StackArgumentsAccessor args(rsp, rcx);
    __ bind(&loop);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ movp(args.GetArgumentOperand(0), rbx);
    __ decp(rcx);
    __ j(not_zero, &loop);  // While non-zero.
    __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
    __ decp(rax);  // One fewer argument (first argument is new receiver).
  }

  // 4. Call the callable.
  // Since we did not create a frame for Function.prototype.call() yet,
  // we use a normal Call builtin here.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
1167
1168
// Implements Reflect.apply(target, thisArgument, argumentsList): rearranges
// the stack so thisArgument is the receiver and tail-calls the Apply builtin
// on argumentsList. Throws if target is not callable.
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : argumentsList
  //  -- rsp[16] : thisArgument
  //  -- rsp[24] : target
  //  -- rsp[32] : receiver
  // -----------------------------------

  // 1. Load target into rdi (if present), argumentsList into rax (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    // Missing target/thisArgument/argumentsList default to undefined.
    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
    __ movp(rdx, rdi);
    __ movp(rbx, rdi);
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ j(equal, &done, Label::kNear);
    __ movp(rdx, args.GetArgumentOperand(2));  // thisArgument
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(3));  // argumentsList
    __ bind(&done);
    // Pop all arguments and the receiver, then push thisArgument as receiver.
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // ----------- S t a t e -------------
  //  -- rax    : argumentsList
  //  -- rdi    : target
  //  -- rsp[0] : return address
  //  -- rsp[8] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(rdi, &target_not_callable, Label::kNear);
  __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsCallable));
  __ j(zero, &target_not_callable, Label::kNear);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(args.GetReceiverOperand(), rdi);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1232
1233
// Implements Reflect.construct(target, argumentsList[, newTarget]): checks
// that both target and new.target are constructors, then tail-calls the Apply
// builtin (which dispatches to Construct) on argumentsList.
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : new.target (optional)
  //  -- rsp[16] : argumentsList
  //  -- rsp[24] : target
  //  -- rsp[32] : receiver
  // -----------------------------------

  // 1. Load target into rdi (if present), argumentsList into rax (if present),
  // new.target into rdx (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push undefined
  // as the receiver instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    // Missing target/argumentsList default to undefined.
    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
    __ movp(rdx, rdi);
    __ movp(rbx, rdi);
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ movp(rdx, rdi);  // new.target defaults to target
    __ j(equal, &done, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(2));  // argumentsList
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movp(rdx, args.GetArgumentOperand(3));  // new.target
    __ bind(&done);
    // Pop all arguments and the receiver, then push undefined as receiver.
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // ----------- S t a t e -------------
  //  -- rax    : argumentsList
  //  -- rdx    : new.target
  //  -- rdi    : target
  //  -- rsp[0] : return address
  //  -- rsp[8] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(rdi, &target_not_constructor, Label::kNear);
  __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsConstructor));
  __ j(zero, &target_not_constructor, Label::kNear);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
  __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsConstructor));
  __ j(zero, &new_target_not_constructor, Label::kNear);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(args.GetReceiverOperand(), rdi);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(args.GetReceiverOperand(), rdx);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
1314
1315
// Entry point for the InternalArray constructor called as a normal function:
// loads the InternalArray function from the native context and tail-calls
// the InternalArrayConstructorStub.
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
1344
1345
// Entry point for the Array constructor called as a normal function: loads
// the Array function from the native context and tail-calls the
// ArrayConstructorStub with undefined AllocationSite feedback.
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // new.target is the function itself for a plain call.
  __ movp(rdx, rdi);
  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);  // No AllocationSite.
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
1375
1376
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001377// static
Ben Murdoch097c5b22016-05-18 11:27:45 +01001378void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
1379 // ----------- S t a t e -------------
1380 // -- rax : number of arguments
1381 // -- rsp[0] : return address
1382 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1383 // -- rsp[(argc + 1) * 8] : receiver
1384 // -----------------------------------
1385 Condition const cc = (kind == MathMaxMinKind::kMin) ? below : above;
1386 Heap::RootListIndex const root_index =
1387 (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
1388 : Heap::kMinusInfinityValueRootIndex;
1389 XMMRegister const reg = (kind == MathMaxMinKind::kMin) ? xmm1 : xmm0;
1390
1391 // Load the accumulator with the default return value (either -Infinity or
1392 // +Infinity), with the tagged value in rdx and the double value in xmm0.
1393 __ LoadRoot(rdx, root_index);
1394 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
1395 __ Move(rcx, rax);
1396
1397 Label done_loop, loop;
1398 __ bind(&loop);
1399 {
1400 // Check if all parameters done.
1401 __ testp(rcx, rcx);
1402 __ j(zero, &done_loop);
1403
1404 // Load the next parameter tagged value into rbx.
1405 __ movp(rbx, Operand(rsp, rcx, times_pointer_size, 0));
1406
1407 // Load the double value of the parameter into xmm1, maybe converting the
1408 // parameter to a number first using the ToNumberStub if necessary.
1409 Label convert, convert_smi, convert_number, done_convert;
1410 __ bind(&convert);
1411 __ JumpIfSmi(rbx, &convert_smi);
1412 __ JumpIfRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1413 Heap::kHeapNumberMapRootIndex, &convert_number);
1414 {
1415 // Parameter is not a Number, use the ToNumberStub to convert it.
1416 FrameScope scope(masm, StackFrame::INTERNAL);
1417 __ Integer32ToSmi(rax, rax);
1418 __ Integer32ToSmi(rcx, rcx);
1419 __ Push(rax);
1420 __ Push(rcx);
1421 __ Push(rdx);
1422 __ movp(rax, rbx);
1423 ToNumberStub stub(masm->isolate());
1424 __ CallStub(&stub);
1425 __ movp(rbx, rax);
1426 __ Pop(rdx);
1427 __ Pop(rcx);
1428 __ Pop(rax);
1429 {
1430 // Restore the double accumulator value (xmm0).
1431 Label restore_smi, done_restore;
1432 __ JumpIfSmi(rdx, &restore_smi, Label::kNear);
1433 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
1434 __ jmp(&done_restore, Label::kNear);
1435 __ bind(&restore_smi);
1436 __ SmiToDouble(xmm0, rdx);
1437 __ bind(&done_restore);
1438 }
1439 __ SmiToInteger32(rcx, rcx);
1440 __ SmiToInteger32(rax, rax);
1441 }
1442 __ jmp(&convert);
1443 __ bind(&convert_number);
1444 __ Movsd(xmm1, FieldOperand(rbx, HeapNumber::kValueOffset));
1445 __ jmp(&done_convert, Label::kNear);
1446 __ bind(&convert_smi);
1447 __ SmiToDouble(xmm1, rbx);
1448 __ bind(&done_convert);
1449
1450 // Perform the actual comparison with the accumulator value on the left hand
1451 // side (xmm0) and the next parameter value on the right hand side (xmm1).
1452 Label compare_equal, compare_nan, compare_swap, done_compare;
1453 __ Ucomisd(xmm0, xmm1);
1454 __ j(parity_even, &compare_nan, Label::kNear);
1455 __ j(cc, &done_compare, Label::kNear);
1456 __ j(equal, &compare_equal, Label::kNear);
1457
1458 // Result is on the right hand side.
1459 __ bind(&compare_swap);
1460 __ Movaps(xmm0, xmm1);
1461 __ Move(rdx, rbx);
1462 __ jmp(&done_compare, Label::kNear);
1463
1464 // At least one side is NaN, which means that the result will be NaN too.
1465 __ bind(&compare_nan);
1466 __ LoadRoot(rdx, Heap::kNanValueRootIndex);
1467 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
1468 __ jmp(&done_compare, Label::kNear);
1469
1470 // Left and right hand side are equal, check for -0 vs. +0.
1471 __ bind(&compare_equal);
1472 __ Movmskpd(kScratchRegister, reg);
1473 __ testl(kScratchRegister, Immediate(1));
1474 __ j(not_zero, &compare_swap);
1475
1476 __ bind(&done_compare);
1477 __ decp(rcx);
1478 __ jmp(&loop);
1479 }
1480
1481 __ bind(&done_loop);
1482 __ PopReturnAddressTo(rcx);
1483 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1484 __ PushReturnAddressFrom(rcx);
1485 __ movp(rax, rdx);
1486 __ Ret();
1487}
1488
// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // Number(x) invoked as a plain function call (not via 'new'):
  // convert the first argument to a number and return it.
  // ----------- S t a t e -------------
  // -- rax                 : number of arguments
  // -- rdi                 : constructor function
  // -- rsp[0]              : return address
  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  // -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Load the first argument into rax and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    StackArgumentsAccessor args(rsp, rax);
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // 2a. Convert the first argument to a number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0 (already in rax).
  // With argc == 0, rax holds the Smi zero, which is exactly the +0 result.
  __ bind(&no_arguments);
  __ ret(1 * kPointerSize);  // pop the receiver only
}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001521
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001522
// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // 'new Number(x)': wrap the converted number in a JSValue, or defer to the
  // runtime when new.target differs from the Number constructor (subclassing).
  // ----------- S t a t e -------------
  // -- rax                 : number of arguments
  // -- rdi                 : constructor function
  // -- rdx                 : new target
  // -- rsp[0]              : return address
  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  // -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // 2. Load the first argument into rbx and get rid of the rest (including the
  // receiver). Missing argument defaults to Smi +0.
  {
    StackArgumentsAccessor args(rsp, rax);
    Label no_arguments, done;
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ jmp(&done, Label::kNear);
    __ bind(&no_arguments);
    __ Move(rbx, Smi::FromInt(0));
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
  }

  // 3. Make sure rbx is a number (Smi or HeapNumber); otherwise convert it
  // with ToNumberStub, preserving rdi/rdx across the call.
  {
    Label done_convert;
    __ JumpIfSmi(rbx, &done_convert);
    __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(equal, &done_convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(rdx);
      __ Push(rdi);
      __ Move(rax, rbx);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Move(rbx, rax);
      __ Pop(rdi);
      __ Pop(rdx);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmpp(rdx, rdi);
  __ j(not_equal, &new_object);

  // 5. Allocate a JSValue wrapper for the number. Falls through to the
  // runtime path if allocation fails.
  __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);  // the first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    // Store the number into the fresh wrapper's [[NumberData]] slot.
    __ Pop(FieldOperand(rax, JSValue::kValueOffset));
  }
  __ Ret();
}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001595
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001596
// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // String(x) invoked as a plain function call (not via 'new'):
  // return the argument converted to a string.
  // ----------- S t a t e -------------
  // -- rax                 : number of arguments
  // -- rdi                 : constructor function
  // -- rsp[0]              : return address
  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  // -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Load the first argument into rax and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    StackArgumentsAccessor args(rsp, rax);
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // 2a. At least one argument, return rax if it's a string, otherwise
  // dispatch to appropriate conversion. Symbols get a dedicated path so
  // String(sym) yields the descriptive string instead of throwing.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(rax, &to_string, Label::kNear);
    // Relies on SYMBOL_TYPE being the first non-string instance type, so a
    // single compare distinguishes string (below), symbol (equal), other
    // (above).
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
    __ j(above, &to_string, Label::kNear);
    __ j(equal, &symbol_descriptive_string, Label::kNear);
    __ Ret();  // already a string
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(rax, Heap::kempty_stringRootIndex);
    __ ret(1 * kPointerSize);
  }

  // 3a. Convert rax to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in rax to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ PopReturnAddressTo(rcx);
    __ Push(rax);
    __ PushReturnAddressFrom(rcx);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}
1656
1657
// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // 'new String(x)': wrap the converted string in a JSValue, or defer to the
  // runtime when new.target differs from the String constructor (subclassing).
  // ----------- S t a t e -------------
  // -- rax                 : number of arguments
  // -- rdi                 : constructor function
  // -- rdx                 : new target
  // -- rsp[0]              : return address
  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  // -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // 2. Load the first argument into rbx and get rid of the rest (including the
  // receiver). Missing argument defaults to the empty string.
  {
    StackArgumentsAccessor args(rsp, rax);
    Label no_arguments, done;
    __ testp(rax, rax);
    __ j(zero, &no_arguments, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ jmp(&done, Label::kNear);
    __ bind(&no_arguments);
    __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(rcx);
  }

  // 3. Make sure rbx is a string; otherwise convert it with ToStringStub,
  // preserving rdi/rdx across the call.
  {
    Label convert, done_convert;
    __ JumpIfSmi(rbx, &convert, Label::kNear);
    __ CmpObjectType(rbx, FIRST_NONSTRING_TYPE, rcx);
    __ j(below, &done_convert);
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(rdx);
      __ Push(rdi);
      __ Move(rax, rbx);
      __ CallStub(&stub);
      __ Move(rbx, rax);
      __ Pop(rdi);
      __ Pop(rdx);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmpp(rdx, rdi);
  __ j(not_equal, &new_object);

  // 5. Allocate a JSValue wrapper for the string. Falls through to the
  // runtime path if allocation fails.
  __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);  // the first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    // Store the string into the fresh wrapper's [[StringData]] slot.
    __ Pop(FieldOperand(rax, JSValue::kValueOffset));
  }
  __ Ret();
}
1730
1731
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001732static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
1733 Label* stack_overflow) {
1734 // ----------- S t a t e -------------
1735 // -- rax : actual number of arguments
1736 // -- rbx : expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001737 // -- rdx : new target (passed through to callee)
1738 // -- rdi : function (passed through to callee)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001739 // -----------------------------------
1740 // Check the stack for overflow. We are not trying to catch
1741 // interruptions (e.g. debug break and preemption) here, so the "real stack
1742 // limit" is checked.
1743 Label okay;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001744 __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001745 __ movp(rcx, rsp);
1746 // Make rcx the space we have left. The stack might already be overflowed
1747 // here which will cause rcx to become negative.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001748 __ subp(rcx, r8);
1749 // Make r8 the space we need for the array when it is unrolled onto the
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001750 // stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001751 __ movp(r8, rbx);
1752 __ shlp(r8, Immediate(kPointerSizeLog2));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001753 // Check if the arguments will overflow the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001754 __ cmpp(rcx, r8);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001755 __ j(less_equal, stack_overflow); // Signed comparison.
1756}
1757
1758
Ben Murdochb0fe1622011-05-05 13:52:32 +01001759static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001760 __ pushq(rbp);
1761 __ movp(rbp, rsp);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001762
1763 // Store the arguments adaptor context sentinel.
1764 __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1765
1766 // Push the function on the stack.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001767 __ Push(rdi);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001768
Ben Murdoch257744e2011-11-30 15:57:28 +00001769 // Preserve the number of arguments on the stack. Must preserve rax,
1770 // rbx and rcx because these registers are used when copying the
Ben Murdochb0fe1622011-05-05 13:52:32 +01001771 // arguments and the receiver.
Ben Murdoch257744e2011-11-30 15:57:28 +00001772 __ Integer32ToSmi(r8, rax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001773 __ Push(r8);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001774}
1775
1776
// Tears down the arguments adaptor frame built by EnterArgumentsAdaptorFrame
// and removes the caller-pushed arguments (count read back from the frame)
// plus the receiver, keeping the return address on top. Clobbers rbx and rcx.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movp(rsp, rbp);
  __ popq(rbp);

  // Remove caller arguments from the stack.
  __ PopReturnAddressTo(rcx);
  // SmiToIndex untags rbx and scales it by the pointer size; the extra
  // kPointerSize accounts for the receiver.
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
}
1791
1792
// Adapts the actual argument count (rax) to the callee's expected count
// (rbx) by re-pushing arguments in an adaptor frame — truncating extras or
// padding with undefined — then calls the function's code entry.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- rax : actual number of arguments
  // -- rbx : expected number of arguments
  // -- rdx : new target (passed through to callee)
  // -- rdi : function (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label enough, too_few;
  __ cmpp(rax, rbx);
  __ j(less, &too_few);
  // Functions marked "don't adapt" are called with the actual count as-is.
  __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  { // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentsAdaptorStackCheck(masm, &stack_overflow);

    // Copy receiver and all expected arguments.
    // rax now points at the receiver slot in the caller frame; r8 counts the
    // copied slots (starting at -1 to include the receiver).
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rax, 0));
    __ subp(rax, Immediate(kPointerSize));
    __ cmpp(r8, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  { // Too few parameters: Actual < expected.
    __ bind(&too_few);

    EnterArgumentsAdaptorFrame(masm);
    ArgumentsAdaptorStackCheck(masm, &stack_overflow);

    // Copy receiver and all actual arguments. rdi is used as the copy cursor
    // here and restored from the adaptor frame afterwards.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rdi, 0));
    __ subp(rdi, Immediate(kPointerSize));
    __ cmpp(r8, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incp(r8);
    __ Push(kScratchRegister);
    __ cmpp(r8, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movp(rdi, Operand(rbp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ movp(rax, rbx);
  // rax : expected number of arguments
  // rdx : new target (passed through to callee)
  // rdi : function (passed through to callee)
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ call(rcx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments: tail-jump straight to the function's code entry.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ jmp(rcx);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();  // unreachable: the runtime call throws
  }
}
1893
1894
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001895// static
1896void Builtins::Generate_Apply(MacroAssembler* masm) {
1897 // ----------- S t a t e -------------
1898 // -- rax : argumentsList
1899 // -- rdi : target
1900 // -- rdx : new.target (checked to be constructor or undefined)
1901 // -- rsp[0] : return address.
1902 // -- rsp[8] : thisArgument
1903 // -----------------------------------
1904
1905 // Create the list of arguments from the array-like argumentsList.
1906 {
1907 Label create_arguments, create_array, create_runtime, done_create;
1908 __ JumpIfSmi(rax, &create_runtime);
1909
1910 // Load the map of argumentsList into rcx.
1911 __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
1912
1913 // Load native context into rbx.
1914 __ movp(rbx, NativeContextOperand());
1915
1916 // Check if argumentsList is an (unmodified) arguments object.
1917 __ cmpp(rcx, ContextOperand(rbx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
1918 __ j(equal, &create_arguments);
1919 __ cmpp(rcx, ContextOperand(rbx, Context::STRICT_ARGUMENTS_MAP_INDEX));
1920 __ j(equal, &create_arguments);
1921
1922 // Check if argumentsList is a fast JSArray.
1923 __ CmpInstanceType(rcx, JS_ARRAY_TYPE);
1924 __ j(equal, &create_array);
1925
1926 // Ask the runtime to create the list (actually a FixedArray).
1927 __ bind(&create_runtime);
1928 {
1929 FrameScope scope(masm, StackFrame::INTERNAL);
1930 __ Push(rdi);
1931 __ Push(rdx);
1932 __ Push(rax);
1933 __ CallRuntime(Runtime::kCreateListFromArrayLike);
1934 __ Pop(rdx);
1935 __ Pop(rdi);
1936 __ SmiToInteger32(rbx, FieldOperand(rax, FixedArray::kLengthOffset));
1937 }
1938 __ jmp(&done_create);
1939
1940 // Try to create the list from an arguments object.
1941 __ bind(&create_arguments);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001942 __ movp(rbx, FieldOperand(rax, JSArgumentsObject::kLengthOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001943 __ movp(rcx, FieldOperand(rax, JSObject::kElementsOffset));
1944 __ cmpp(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
1945 __ j(not_equal, &create_runtime);
1946 __ SmiToInteger32(rbx, rbx);
1947 __ movp(rax, rcx);
1948 __ jmp(&done_create);
1949
1950 // Try to create the list from a JSArray object.
1951 __ bind(&create_array);
1952 __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
1953 __ DecodeField<Map::ElementsKindBits>(rcx);
1954 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
1955 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
1956 STATIC_ASSERT(FAST_ELEMENTS == 2);
1957 __ cmpl(rcx, Immediate(FAST_ELEMENTS));
1958 __ j(above, &create_runtime);
1959 __ cmpl(rcx, Immediate(FAST_HOLEY_SMI_ELEMENTS));
1960 __ j(equal, &create_runtime);
1961 __ SmiToInteger32(rbx, FieldOperand(rax, JSArray::kLengthOffset));
1962 __ movp(rax, FieldOperand(rax, JSArray::kElementsOffset));
1963
1964 __ bind(&done_create);
1965 }
1966
1967 // Check for stack overflow.
1968 {
1969 // Check the stack for overflow. We are not trying to catch interruptions
1970 // (i.e. debug break and preemption) here, so check the "real stack limit".
1971 Label done;
1972 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
1973 __ movp(rcx, rsp);
1974 // Make rcx the space we have left. The stack might already be overflowed
1975 // here which will cause rcx to become negative.
1976 __ subp(rcx, kScratchRegister);
1977 __ sarp(rcx, Immediate(kPointerSizeLog2));
1978 // Check if the arguments will overflow the stack.
1979 __ cmpp(rcx, rbx);
1980 __ j(greater, &done, Label::kNear); // Signed comparison.
1981 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1982 __ bind(&done);
1983 }
1984
1985 // ----------- S t a t e -------------
1986 // -- rdi : target
1987 // -- rax : args (a FixedArray built from argumentsList)
1988 // -- rbx : len (number of elements to push from args)
1989 // -- rdx : new.target (checked to be constructor or undefined)
1990 // -- rsp[0] : return address.
1991 // -- rsp[8] : thisArgument
1992 // -----------------------------------
1993
1994 // Push arguments onto the stack (thisArgument is already on the stack).
1995 {
1996 __ PopReturnAddressTo(r8);
1997 __ Set(rcx, 0);
1998 Label done, loop;
1999 __ bind(&loop);
2000 __ cmpl(rcx, rbx);
2001 __ j(equal, &done, Label::kNear);
2002 __ Push(
2003 FieldOperand(rax, rcx, times_pointer_size, FixedArray::kHeaderSize));
2004 __ incl(rcx);
2005 __ jmp(&loop);
2006 __ bind(&done);
2007 __ PushReturnAddressFrom(r8);
2008 __ Move(rax, rcx);
2009 }
2010
2011 // Dispatch to Call or Construct depending on whether new.target is undefined.
2012 {
2013 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
2014 __ j(equal, masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2015 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2016 }
2017}
2018
Ben Murdoch097c5b22016-05-18 11:27:45 +01002019namespace {
2020
2021// Drops top JavaScript frame and an arguments adaptor frame below it (if
2022// present) preserving all the arguments prepared for current call.
2023// Does nothing if debugger is currently active.
2024// ES6 14.6.3. PrepareForTailCall
2025//
2026// Stack structure for the function g() tail calling f():
2027//
2028// ------- Caller frame: -------
2029// | ...
2030// | g()'s arg M
2031// | ...
2032// | g()'s arg 1
2033// | g()'s receiver arg
2034// | g()'s caller pc
2035// ------- g()'s frame: -------
2036// | g()'s caller fp <- fp
2037// | g()'s context
2038// | function pointer: g
2039// | -------------------------
2040// | ...
2041// | ...
2042// | f()'s arg N
2043// | ...
2044// | f()'s arg 1
2045// | f()'s receiver arg
2046// | f()'s caller pc <- sp
2047// ----------------------
2048//
2049void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
2050 Register scratch1, Register scratch2,
2051 Register scratch3) {
2052 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
2053 Comment cmnt(masm, "[ PrepareForTailCall");
2054
Ben Murdochda12d292016-06-02 14:46:10 +01002055 // Prepare for tail call only if ES2015 tail call elimination is active.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002056 Label done;
Ben Murdochda12d292016-06-02 14:46:10 +01002057 ExternalReference is_tail_call_elimination_enabled =
2058 ExternalReference::is_tail_call_elimination_enabled_address(
2059 masm->isolate());
2060 __ Move(kScratchRegister, is_tail_call_elimination_enabled);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002061 __ cmpb(Operand(kScratchRegister, 0), Immediate(0));
Ben Murdochda12d292016-06-02 14:46:10 +01002062 __ j(equal, &done);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002063
2064 // Drop possible interpreter handler/stub frame.
2065 {
2066 Label no_interpreter_frame;
Ben Murdochda12d292016-06-02 14:46:10 +01002067 __ Cmp(Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset),
Ben Murdoch097c5b22016-05-18 11:27:45 +01002068 Smi::FromInt(StackFrame::STUB));
2069 __ j(not_equal, &no_interpreter_frame, Label::kNear);
2070 __ movp(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2071 __ bind(&no_interpreter_frame);
2072 }
2073
2074 // Check if next frame is an arguments adaptor frame.
Ben Murdochda12d292016-06-02 14:46:10 +01002075 Register caller_args_count_reg = scratch1;
Ben Murdoch097c5b22016-05-18 11:27:45 +01002076 Label no_arguments_adaptor, formal_parameter_count_loaded;
2077 __ movp(scratch2, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01002078 __ Cmp(Operand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset),
Ben Murdoch097c5b22016-05-18 11:27:45 +01002079 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2080 __ j(not_equal, &no_arguments_adaptor, Label::kNear);
2081
Ben Murdochda12d292016-06-02 14:46:10 +01002082 // Drop current frame and load arguments count from arguments adaptor frame.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002083 __ movp(rbp, scratch2);
2084 __ SmiToInteger32(
Ben Murdochda12d292016-06-02 14:46:10 +01002085 caller_args_count_reg,
2086 Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002087 __ jmp(&formal_parameter_count_loaded, Label::kNear);
2088
2089 __ bind(&no_arguments_adaptor);
2090 // Load caller's formal parameter count
2091 __ movp(scratch1, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
2092 __ movp(scratch1,
2093 FieldOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
2094 __ LoadSharedFunctionInfoSpecialField(
Ben Murdochda12d292016-06-02 14:46:10 +01002095 caller_args_count_reg, scratch1,
2096 SharedFunctionInfo::kFormalParameterCountOffset);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002097
2098 __ bind(&formal_parameter_count_loaded);
2099
Ben Murdochda12d292016-06-02 14:46:10 +01002100 ParameterCount callee_args_count(args_reg);
2101 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
2102 scratch3, ReturnAddressState::kOnStack);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002103 __ bind(&done);
2104}
2105} // namespace
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002106
// static
//
// Calling convention: rax holds the argument count, rdi the callee
// (already checked to be a JSFunction). Converts the receiver for
// non-native sloppy-mode functions, optionally prepares a tail call,
// then jumps into the function's code via InvokeFunctionCode.
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);
  __ AssertFunction(rdi);

  // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor": class constructors
  // must throw when [[Call]]ed rather than run.
  Label class_constructor;
  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testb(FieldOperand(rdx, SharedFunctionInfo::kFunctionKindByteOffset),
           Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ j(not_zero, &class_constructor);

  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the shared function info.
  //  -- rdi : the function to call (checked to be a JSFunction)
  // -----------------------------------

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  // The STATIC_ASSERT guarantees the native and strict-mode bits live in the
  // same byte, so a single testb below can check both at once.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
           Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                     (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ j(not_zero, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- rax : the number of arguments (not including the receiver)
    //  -- rdx : the shared function info.
    //  -- rdi : the function to call (checked to be a JSFunction)
    //  -- rsi : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // The caller guarantees the receiver is null/undefined:
      // patch receiver to global proxy unconditionally.
      __ LoadGlobalProxy(rcx);
    } else {
      Label convert_to_object, convert_receiver;
      __ movp(rcx, args.GetReceiverOperand());
      __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
      // A receiver that is already a JSReceiver needs no conversion.
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
      __ j(above_equal, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        // Receiver might be null/undefined; map those to the global proxy.
        Label convert_global_proxy;
        __ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy, Label::kNear);
        __ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object,
                         Label::kNear);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(rcx);
        }
        __ jmp(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject. rax (arg count) and rdi (callee)
        // are preserved across the call by pushing them as Smis on the stack.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ Integer32ToSmi(rax, rax);
        __ Push(rax);
        __ Push(rdi);
        __ movp(rax, rcx);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ movp(rcx, rax);
        __ Pop(rdi);
        __ Pop(rax);
        __ SmiToInteger32(rax, rax);
      }
      // The stub call may have clobbered rdx; reload the shared function info.
      __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    // Store the (possibly patched) receiver back into the argument area.
    __ movp(args.GetReceiverOperand(), rcx);
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the shared function info.
  //  -- rdi : the function to call (checked to be a JSFunction)
  //  -- rsi : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, rax, rbx, rcx, r8);
  }

  // Dispatch: expected argument count comes from the SharedFunctionInfo;
  // InvokeFunctionCode handles any arguments adaption.
  __ LoadSharedFunctionInfoSpecialField(
      rbx, rdx, SharedFunctionInfo::kFormalParameterCountOffset);
  ParameterCount actual(rax);
  ParameterCount expected(rbx);

  __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
2226
2227
namespace {

// Pushes the [[BoundArguments]] of the JSBoundFunction in rdi onto the
// stack, between the receiver and the arguments already pushed by the
// caller. On exit rax has been adjusted to include the bound arguments.
// rdx (new.target, [[Construct]] case only) and rdi are preserved.
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : new.target (only in case of [[Construct]])
  //  -- rdi : target (checked to be a JSBoundFunction)
  // -----------------------------------

  // Load [[BoundArguments]] into rcx and length of that into rbx.
  Label no_bound_arguments;
  __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
  __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
  __ testl(rbx, rbx);
  __ j(zero, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- rax : the number of arguments (not including the receiver)
    //  -- rdx : new.target (only in case of [[Construct]])
    //  -- rdi : target (checked to be a JSBoundFunction)
    //  -- rcx : the [[BoundArguments]] (implemented as FixedArray)
    //  -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ leap(kScratchRegister, Operand(rbx, times_pointer_size, 0));
      __ subp(rsp, kScratchRegister);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
      __ j(greater, &done, Label::kNear);  // Signed comparison.
      // Overflow: restore the stack pointer before throwing so the frame
      // walker sees a sane stack.
      __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Adjust effective number of arguments to include return address.
    __ incl(rax);

    // Relocate arguments and return address down the stack, over the gap
    // just reserved: copy rax slots from old position (rsp + rbx slots)
    // to the new, lower position.
    {
      Label loop;
      __ Set(rcx, 0);
      __ leap(rbx, Operand(rsp, rbx, times_pointer_size, 0));
      __ bind(&loop);
      __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
      __ movp(Operand(rsp, rcx, times_pointer_size, 0), kScratchRegister);
      __ incl(rcx);
      __ cmpl(rcx, rax);
      __ j(less, &loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments), walking
    // the FixedArray from the last element down to index 0.
    {
      Label loop;
      __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
      __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
      __ bind(&loop);
      __ decl(rbx);
      __ movp(kScratchRegister, FieldOperand(rcx, rbx, times_pointer_size,
                                             FixedArray::kHeaderSize));
      __ movp(Operand(rsp, rax, times_pointer_size, 0), kScratchRegister);
      // leal (like mov) does not touch EFLAGS, so the j(greater) below still
      // tests the result of the decl(rbx) above.
      __ leal(rax, Operand(rax, 1));
      __ j(greater, &loop);
    }

    // Adjust effective number of arguments (rax contains the number of
    // arguments from the call plus return address plus the number of
    // [[BoundArguments]]), so we need to subtract one for the return address.
    __ decl(rax);
  }
  __ bind(&no_bound_arguments);
}

}  // namespace
2311
2312
// static
//
// [[Call]] for a JSBoundFunction: installs [[BoundThis]] as the receiver,
// pushes the [[BoundArguments]], then tail-calls the Call builtin on the
// [[BoundTargetFunction]].
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(rdi);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, rax, rbx, rcx, r8);
  }

  // Patch the receiver to [[BoundThis]].
  StackArgumentsAccessor args(rsp, rax);
  __ movp(rbx, FieldOperand(rdi, JSBoundFunction::kBoundThisOffset));
  __ movp(args.GetReceiverOperand(), rbx);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin; jump past the
  // Code object header to its first instruction.
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Load(rcx,
          ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);
}
2341
2342
// static
//
// Generic [[Call]] dispatcher: accepts any Object in rdi and routes to the
// JSFunction path, the bound-function path, the proxy runtime fallback, the
// call-as-function delegate, or a TypeError for non-callables.
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the target to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(rdi, &non_callable);
  __ bind(&non_smi);
  // CmpObjectType leaves the map of rdi in rcx; it is reused by the
  // CmpInstanceType / bit-field checks below.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(equal, masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
       RelocInfo::CODE_TARGET);
  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ j(equal, masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
       RelocInfo::CODE_TARGET);

  // Check if target has a [[Call]] internal method.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsCallable));
  __ j(zero, &non_callable);

  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ j(not_equal, &non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, rax, rbx, rcx, r8);
  }

  // 1. Runtime fallback for Proxy [[Call]]: push the proxy itself as an
  // extra argument under the return address.
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(rdi);
  __ PushReturnAddressFrom(kScratchRegister);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ addp(rax, Immediate(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ movp(args.GetReceiverOperand(), rdi);
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
2405
2406
// static
//
// [[Construct]] for a plain JSFunction: tail-calls the function-specific
// construct stub found on the SharedFunctionInfo.
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (checked to be a constructor)
  //  -- rdi : the constructor to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(rdi);

  // Calling convention for function specific ConstructStubs require
  // rbx to contain either an AllocationSite or undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point). Jump past the Code header to the first
  // instruction.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);
}
2427
2428
// static
//
// [[Construct]] for a JSBoundFunction: pushes [[BoundArguments]], fixes up
// new.target when it referred to the bound function itself, then tail-calls
// the Construct builtin on the [[BoundTargetFunction]].
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (checked to be a constructor)
  //  -- rdi : the constructor to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(rdi);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ cmpp(rdi, rdx);
    __ j(not_equal, &done, Label::kNear);
    __ movp(rdx,
            FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Load(rcx, ExternalReference(Builtins::kConstruct, masm->isolate()));
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);
}
2457
2458
// static
//
// [[Construct]] for a JSProxy: pushes the proxy and new.target as extra
// arguments and tail-calls into the runtime.
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdi : the constructor to call (checked to be a JSProxy)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(rdi);
  __ Push(rdx);
  __ PushReturnAddressFrom(kScratchRegister);
  // Include the pushed new_target, constructor and the receiver.
  __ addp(rax, Immediate(3));
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
2478
2479
// static
//
// Generic [[Construct]] dispatcher: accepts any Object in rdi and routes to
// the JSFunction construct stub, the bound-function path, the proxy runtime
// fallback, the call-as-constructor delegate, or a TypeError for
// non-constructables.
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(rsp, rax);

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(rdi, &non_constructor, Label::kNear);

  // Dispatch based on instance type. CmpObjectType leaves the map of rdi in
  // rcx for the checks that follow.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(equal, masm->isolate()->builtins()->ConstructFunction(),
       RelocInfo::CODE_TARGET);

  // Check if target has a [[Construct]] internal method.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsConstructor));
  __ j(zero, &non_constructor, Label::kNear);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ j(equal, masm->isolate()->builtins()->ConstructBoundFunction(),
       RelocInfo::CODE_TARGET);

  // Only dispatch to proxies after checking whether they are constructors.
  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ j(equal, masm->isolate()->builtins()->ConstructProxy(),
       RelocInfo::CODE_TARGET);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ movp(args.GetReceiverOperand(), rdi);
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
2531
2532
// Checks that |receiver| is compatible with the signature of
// |function_template_info|. Walks the receiver's (hidden) prototype chain;
// at each step it looks at the map's constructor and, if that constructor is
// a JSFunction, follows its chain of parent FunctionTemplateInfos looking
// for one equal to the signature. Falls through when the check passes;
// jumps to |receiver_check_failed| otherwise. Clobbers the three scratch
// registers, kScratchRegister, and (on the walk) |receiver| itself.
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Register scratch0, Register scratch1,
                                    Register scratch2,
                                    Label* receiver_check_failed) {
  Register signature = scratch0;
  Register map = scratch1;
  Register constructor = scratch2;

  // If there is no signature, return the holder.
  __ movp(signature, FieldOperand(function_template_info,
                                  FunctionTemplateInfo::kSignatureOffset));
  __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
  Label receiver_check_passed;
  __ j(equal, &receiver_check_passed, Label::kNear);

  // Walk the prototype chain.
  __ movp(map, FieldOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any; only JSFunction constructors carry a
  // function template to compare against.
  __ GetMapConstructor(constructor, map, kScratchRegister);
  __ CmpInstanceType(kScratchRegister, JS_FUNCTION_TYPE);
  Label next_prototype;
  __ j(not_equal, &next_prototype, Label::kNear);

  // Get the constructor's signature.
  Register type = constructor;
  __ movp(type,
          FieldOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ movp(type, FieldOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ cmpp(signature, type);
  __ j(equal, &receiver_check_passed, Label::kNear);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype, Label::kNear);
  __ CmpObjectType(type, FUNCTION_TEMPLATE_INFO_TYPE, kScratchRegister);
  __ j(not_equal, &next_prototype, Label::kNear);

  // Otherwise load the parent function template and iterate.
  __ movp(type,
          FieldOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ jmp(&function_template_loop, Label::kNear);

  // Load the next prototype; only hidden prototypes are walked — a map
  // without the hidden-prototype bit ends the search unsuccessfully.
  __ bind(&next_prototype);
  __ testq(FieldOperand(map, Map::kBitField3Offset),
           Immediate(Map::HasHiddenPrototype::kMask));
  __ j(zero, receiver_check_failed);
  __ movp(receiver, FieldOperand(map, Map::kPrototypeOffset));
  __ movp(map, FieldOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ jmp(&prototype_loop_start, Label::kNear);

  __ bind(&receiver_check_passed);
}
2597
2598
// Entry point for API functions with a fast handler: verifies the receiver
// against the FunctionTemplateInfo's signature, then jumps directly to the
// fast handler code. On signature mismatch, drops all arguments and throws
// an Illegal Invocation exception.
void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : number of arguments (not including the receiver)
  //  -- rdi : callee
  //  -- rsi : context
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  //  -- ...
  //  -- rsp[rax * 8] : first argument
  //  -- rsp[(rax + 1) * 8] : receiver
  // -----------------------------------

  StackArgumentsAccessor args(rsp, rax);

  // Load the FunctionTemplateInfo.
  __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rbx, FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ movp(rcx, args.GetReceiverOperand());
  CompatibleReceiverCheck(masm, rcx, rbx, rdx, r8, r9, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ movp(rdx, FieldOperand(rbx, FunctionTemplateInfo::kCallCodeOffset));
  __ movp(rdx, FieldOperand(rdx, CallHandlerInfo::kFastHandlerOffset));
  __ addp(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rdx);

  // Compatible receiver check failed: pop return address, arguments and
  // receiver ((rax + 1) slots total) and throw an Illegal Invocation
  // exception.
  __ bind(&receiver_check_failed);
  __ PopReturnAddressTo(rbx);
  __ leap(rax, Operand(rax, times_pointer_size, 1 * kPointerSize));
  __ addp(rsp, rax);
  __ PushReturnAddressFrom(rbx);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
  }
}
2641
2642
// On-stack replacement: asks the runtime to compile optimized code for the
// function in the current JavaScript frame, then rewrites the return
// address so execution "returns" into the optimized code at the OSR entry
// point. If no optimized code is produced, returns to the unoptimized code.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the unoptimized code.
  __ cmpp(rax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Load deoptimization data from the code object.
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset
  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}
2677
2678
Ben Murdochb0fe1622011-05-05 13:52:32 +01002679#undef __
2680
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002681} // namespace internal
2682} // namespace v8
Leon Clarkef7060e22010-06-03 12:02:55 +01002683
2684#endif // V8_TARGET_ARCH_X64