// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/code-factory.h"
#include "src/codegen.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : target
  //  -- rdx                 : new.target
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------
  __ AssertFunction(rdi);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args != BuiltinExtraArguments::kNone) {
    __ PopReturnAddressTo(kScratchRegister);
    if (extra_args & BuiltinExtraArguments::kTarget) {
      ++num_extra_args;
      __ Push(rdi);
    }
    if (extra_args & BuiltinExtraArguments::kNewTarget) {
      ++num_extra_args;
      __ Push(rdx);
    }
    __ PushReturnAddressFrom(kScratchRegister);
  }

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addp(rax, Immediate(num_extra_args + 1));

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
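
// In other words, after the optional pushes above the C++ builtin sees:
//   rsp[0]     : return address
//   rsp[8] ... : the extra argument(s) (new.target and/or target), if any
//   below that : the original arguments and the receiver
// and rax has been adjusted to argc + num_extra_args + 1 (the +1 accounting
// for the receiver), which is what JumpToExternalReference expects.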


static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------

  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the target function and the new target.
  __ Push(rdi);
  __ Push(rdx);
  // Function is also the parameter to the runtime call.
  __ Push(rdi);

  __ CallRuntime(function_id, 1);
  // Restore target function and new target.
  __ Pop(rdx);
  __ Pop(rdi);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ movp(kScratchRegister,
          FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
  __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
  __ jmp(kScratchRegister);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ leap(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  //  -- rbx: allocation site or undefined
  //  -- rdx: new target
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(rbx);
    __ Push(rbx);
    __ Integer32ToSmi(rcx, rax);
    __ Push(rcx);

    if (create_implicit_receiver) {
      // Try to allocate the object without transitioning into C code. If any
      // of the preconditions is not met, the code bails out to the runtime
      // call.
      Label rt_call, allocated;
      if (FLAG_inline_new) {
        // Verify that the new target is a JSFunction.
        __ CmpObjectType(rdx, JS_FUNCTION_TYPE, rbx);
        __ j(not_equal, &rt_call);

        // Load the initial map and verify that it is in fact a map.
        // rdx: new target
        __ movp(rax,
                FieldOperand(rdx, JSFunction::kPrototypeOrInitialMapOffset));
        // Will both indicate a NULL and a Smi.
        DCHECK(kSmiTag == 0);
        __ JumpIfSmi(rax, &rt_call);
        // rdi: constructor
        // rax: initial map (if proven valid below)
        __ CmpObjectType(rax, MAP_TYPE, rbx);
        __ j(not_equal, &rt_call);

        // Fall back to runtime if the expected base constructor and base
        // constructor differ.
        __ cmpp(rdi, FieldOperand(rax, Map::kConstructorOrBackPointerOffset));
        __ j(not_equal, &rt_call);

        // Now allocate the JSObject on the heap.
        __ movzxbp(r9, FieldOperand(rax, Map::kInstanceSizeOffset));
        __ shlp(r9, Immediate(kPointerSizeLog2));
        // r9: size of new object
        __ Allocate(r9, rbx, r9, no_reg, &rt_call, NO_ALLOCATION_FLAGS);
        // Allocated the JSObject, now initialize the fields.
        // rdi: constructor
        // rdx: new target
        // rax: initial map
        // rbx: JSObject (not HeapObject tagged - the actual address).
        // r9: start of next object
        __ movp(Operand(rbx, JSObject::kMapOffset), rax);
        __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
        __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
        __ movp(Operand(rbx, JSObject::kElementsOffset), rcx);
        __ leap(rcx, Operand(rbx, JSObject::kHeaderSize));

        // Add the object tag to make the JSObject real, so that we can
        // continue and jump into the continuation code at any time from now
        // on.
        __ orp(rbx, Immediate(kHeapObjectTag));

        // Fill all the in-object properties with the appropriate filler.
        // rbx: JSObject (tagged)
        // rcx: First in-object property of JSObject (not tagged)
        __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);

        if (!is_api_function) {
          Label no_inobject_slack_tracking;

          // The code below relies on these assumptions.
          STATIC_ASSERT(Map::kNoSlackTracking == 0);
          STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
          // Check if slack tracking is enabled.
          __ movl(rsi, FieldOperand(rax, Map::kBitField3Offset));
          __ shrl(rsi, Immediate(Map::ConstructionCounter::kShift));
          __ j(zero, &no_inobject_slack_tracking);  // Map::kNoSlackTracking
          __ Push(rsi);  // Save allocation count value.
          // Decrease generous allocation count.
          __ subl(FieldOperand(rax, Map::kBitField3Offset),
                  Immediate(1 << Map::ConstructionCounter::kShift));

          // Allocate object with a slack.
          __ movzxbp(rsi, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
          __ negp(rsi);
          __ leap(rsi, Operand(r9, rsi, times_pointer_size, 0));
          // rsi: offset of first field after pre-allocated fields
          if (FLAG_debug_code) {
            __ cmpp(rcx, rsi);
            __ Assert(less_equal,
                      kUnexpectedNumberOfPreAllocatedPropertyFields);
          }
          __ InitializeFieldsWithFiller(rcx, rsi, r11);

          // To allow truncation fill the remaining fields with one pointer
          // filler map.
          __ LoadRoot(r11, Heap::kOnePointerFillerMapRootIndex);
          __ InitializeFieldsWithFiller(rcx, r9, r11);

          __ Pop(rsi);  // Restore allocation count value before decreasing.
          __ cmpl(rsi, Immediate(Map::kSlackTrackingCounterEnd));
          __ j(not_equal, &allocated);

          // Push the constructor, new_target and the object to the stack,
          // and then the initial map as an argument to the runtime call.
          __ Push(rdi);
          __ Push(rdx);
          __ Push(rbx);

          __ Push(rax);  // initial map
          __ CallRuntime(Runtime::kFinalizeInstanceSize);

          __ Pop(rbx);
          __ Pop(rdx);
          __ Pop(rdi);

          // Continue with JSObject being successfully allocated.
          // rdi: constructor
          // rdx: new target
          // rbx: JSObject (tagged)
          __ jmp(&allocated);

          __ bind(&no_inobject_slack_tracking);
        }

        __ InitializeFieldsWithFiller(rcx, r9, r11);

        // Continue with JSObject being successfully allocated.
        // rdi: constructor
        // rdx: new target
        // rbx: JSObject (tagged)
        __ jmp(&allocated);
      }

      // Allocate the new receiver object using the runtime call.
      // rdi: constructor
      // rdx: new target
      __ bind(&rt_call);

      // Must restore rsi (context) before calling runtime.
      __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

      // Push the constructor and new_target twice, second pair as arguments
      // to the runtime call.
      __ Push(rdi);
      __ Push(rdx);
      __ Push(rdi);  // constructor function
      __ Push(rdx);  // new target
      __ CallRuntime(Runtime::kNewObject);
      __ movp(rbx, rax);  // store result in rbx
      __ Pop(rdx);
      __ Pop(rdi);

      // Receiver for constructor call allocated.
      // rdi: constructor
      // rdx: new target
      // rbx: newly allocated object
      __ bind(&allocated);

      // Retrieve smi-tagged arguments count from the stack.
      __ movp(rax, Operand(rsp, 0));
      __ SmiToInteger32(rax, rax);
    }

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(rbx);
      __ Push(rbx);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop);

    // Call the function.
    if (is_api_function) {
      __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(rax);
      __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;
      // If the result is a smi, it is *not* an object in the ECMA sense.
      __ JumpIfSmi(rax, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
      __ j(above_equal, &exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ movp(rax, Operand(rsp, 0));

      // Restore the arguments count and leave the construct frame. The
      // arguments count is stored below the receiver.
      __ bind(&exit);
      __ movp(rbx, Operand(rsp, 1 * kPointerSize));
    } else {
      __ movp(rbx, Operand(rsp, 0));
    }

    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  if (create_implicit_receiver) {
    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->constructed_objects(), 1);
  }
  __ ret(0);
}
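
// In the create_implicit_receiver configuration used by
// Generate_JSConstructStubGeneric below, the helper above allocates the
// implicit receiver (inline when FLAG_inline_new permits, otherwise through
// Runtime::kNewObject), copies the arguments, invokes the constructor with
// rdx as new.target, and returns the constructor's result when it is a
// JSReceiver, falling back to the allocated receiver otherwise.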


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, true);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(rdi);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}


enum IsTagged { kRaxIsSmiTagged, kRaxIsUntaggedInt };

// Clobbers rcx, r11, kScratchRegister; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        IsTagged rax_is_tagged) {
  // rax : the number of items to be pushed to the stack
  //
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subp(rcx, kScratchRegister);
  // Make r11 the space we need for the array when it is unrolled onto the
  // stack.
  if (rax_is_tagged == kRaxIsSmiTagged) {
    __ PositiveSmiTimesPowerOfTwoToInteger64(r11, rax, kPointerSizeLog2);
  } else {
    DCHECK(rax_is_tagged == kRaxIsUntaggedInt);
    __ movp(r11, rax);
    __ shlq(r11, Immediate(kPointerSizeLog2));
  }
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, r11);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
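
// In effect the check above computes
//   space_left = rsp - real_stack_limit;   // negative if already overflowed
//   needed     = rax * kPointerSize;       // rax smi-scaled or shifted
// and calls Runtime::kThrowStackOverflow unless space_left > needed, using a
// signed comparison so an already-overflowed stack fails as well.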


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Object* new_target
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver, registers rax
    // and rbx hold the argument count and argument array, while rdi holds the
    // function pointer, rsi the context, and rdx the new.target.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : new_target
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and the receiver onto the stack.
    __ Push(rdx);
    __ Push(r8);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, r9);
    // Load the previous frame pointer to access C arguments on the stack.
    __ movp(kScratchRegister, Operand(rbp, 0));
    __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movp(rdi, rdx);
    // Load the new.target into rdx.
    __ movp(rdx, rcx);
#else  // _WIN64
    // GCC parameters in:
    // rdi : new_target
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    __ movp(r11, rdi);
    __ movp(rdi, rsi);
    // rdi : function
    // r11 : new_target

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ movp(rsi, masm->ExternalOperand(context_address));

    // Push the function and receiver onto the stack.
    __ Push(rdi);
    __ Push(rdx);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, rcx);
    __ movp(rbx, r8);

    // Load the new.target into rdx.
    __ movp(rdx, r11);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function
    // rdx : new.target

    // Check if we have enough stack space to push all arguments.
    // Expects argument count in rax. Clobbers rcx, r11.
    Generate_CheckStackOverflow(masm, kRaxIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ Push(Operand(kScratchRegister, 0));  // dereference handle
    __ addp(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpp(rcx, rax);
    __ j(not_equal, &loop);

    // Invoke the builtin code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called
//   o rdx: the new target
//   o rsi: our context
//   o rbp: the caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
// TODO(rmcilroy): We will need to include the current bytecode pointer in the
// frame.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS function.
  __ Push(rdx);  // Callee's new target.

  // Push zero for bytecode array offset.
  __ Push(Immediate(0));

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rax);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ movp(rdx, rsp);
    __ subp(rdx, rcx);
    __ CompareRoot(rdx, Heap::kRealStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ j(always, &loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ Push(rdx);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ subp(rcx, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header, Label::kNear);
  }

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Support profiler (specifically profiling_counter).
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Allow simulator stop operations if FLAG_stop_at is set.
  //  - Code aging of the BytecodeArray object.

  // Perform stack guard check.
  {
    Label ok;
    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ Push(kInterpreterBytecodeArrayRegister);
    __ CallRuntime(Runtime::kStackGuard);
    __ Pop(kInterpreterBytecodeArrayRegister);
    __ bind(&ok);
  }

  // Load accumulator, register file, bytecode offset, dispatch table into
  // registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ movp(kInterpreterRegisterFileRegister, rbp);
  __ addp(kInterpreterRegisterFileRegister,
          Immediate(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ movp(kInterpreterBytecodeOffsetRegister,
          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ addp(kInterpreterDispatchTableRegister,
          Immediate(FixedArray::kHeaderSize - kHeapObjectTag));

  // Dispatch to the first bytecode handler for the function.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  // TODO(rmcilroy): Make dispatch table point to code entries to avoid
  // untagging and header removal.
  __ addp(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(rbx);
}
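
// The final dispatch above amounts to, in C-like pseudocode:
//   byte bc       = bytecode_array[bytecode_offset];
//   Code* handler = dispatch_table[bc];
//   call(handler->instruction_start());
// i.e. the first bytecode of the function selects the handler that starts
// executing it.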


void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in the accumulator, which is already in rax.

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments and return.
  __ movl(rbx, FieldOperand(kInterpreterBytecodeArrayRegister,
                            BytecodeArray::kParameterSizeOffset));
  __ PopReturnAddressTo(rcx);
  __ addp(rsp, rbx);
  __ PushReturnAddressFrom(rcx);
  __ ret(0);
}


static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         bool push_receiver) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------

  // Find the address of the last argument.
  __ movp(rcx, rax);
  if (push_receiver) {
    __ addp(rcx, Immediate(1));  // Add one for receiver.
  }

  __ shlp(rcx, Immediate(kPointerSizeLog2));
  __ negp(rcx);
  __ addp(rcx, rbx);

  // Push the arguments.
  Label loop_header, loop_check;
  __ j(always, &loop_check);
  __ bind(&loop_header);
  __ Push(Operand(rbx, 0));
  __ subp(rbx, Immediate(kPointerSize));
  __ bind(&loop_check);
  __ cmpp(rbx, rcx);
  __ j(greater, &loop_header, Label::kNear);
}
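
// The loop above is equivalent to:
//   count = rax (+ 1 if push_receiver);
//   limit = rbx - count * kPointerSize;
//   for (ptr = rbx; ptr > limit; ptr -= kPointerSize) Push(*ptr);
// so the arguments land on the stack in the same order the caller laid them
// out in memory.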


// static
void Builtins::Generate_InterpreterPushArgsAndCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- rdi : the target to call (can be any Object).
  // -----------------------------------

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  Generate_InterpreterPushArgs(masm, true);

  // Call the target.
  __ PushReturnAddressFrom(kScratchRegister);  // Re-push return address.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}


// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : the number of arguments (not including the receiver)
  //  -- rdx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- rdi : the constructor to call (can be any Object)
  //  -- rbx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push slot for the receiver to be constructed.
  __ Push(Immediate(0));

  Generate_InterpreterPushArgs(masm, false);

  // Push return address in preparation for the tail-call.
  __ PushReturnAddressFrom(kScratchRegister);

  // Call the constructor (rax, rdx, rdi passed on).
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}


static void Generate_InterpreterNotifyDeoptimizedHelper(
    MacroAssembler* masm, Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(kInterpreterAccumulatorRegister);  // Save accumulator register.

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));

    __ CallRuntime(Runtime::kNotifyDeoptimized);

    __ Pop(kInterpreterAccumulatorRegister);  // Restore accumulator register.
    // Tear down internal frame.
  }

  // Drop state (we don't use these for interpreter deopts) and push PC at top
  // of stack (to simulate initial call to bytecode handler in interpreter entry
  // trampoline).
  __ Pop(rbx);
  __ Drop(1);
  __ Push(rbx);

  // Initialize register file register and dispatch table register.
  __ movp(kInterpreterRegisterFileRegister, rbp);
  __ addp(kInterpreterRegisterFileRegister,
          Immediate(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ addp(kInterpreterDispatchTableRegister,
          Immediate(FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the context from the frame.
  // TODO(rmcilroy): Update interpreter frame to expect current context at the
  // context slot instead of the function context.
  __ movp(kContextRegister,
          Operand(kInterpreterRegisterFileRegister,
                  InterpreterFrameConstants::kContextFromRegisterPointer));

  // Get the bytecode array pointer from the frame.
  __ movp(rbx,
          Operand(kInterpreterRegisterFileRegister,
                  InterpreterFrameConstants::kFunctionFromRegisterPointer));
  __ movp(rbx, FieldOperand(rbx, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rbx);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ movp(
      kInterpreterBytecodeOffsetRegister,
      Operand(kInterpreterRegisterFileRegister,
              InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
  __ SmiToInteger32(kInterpreterBytecodeOffsetRegister,
                    kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  __ addp(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rbx);
}


void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileOptimized_Concurrent);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns.
  __ subp(Operand(rsp, 0), Immediate(5));
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ Popad();
  __ ret(0);
}


#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ Popad();

  // Perform prologue operations usually performed by the young code stub.
  __ PopReturnAddressTo(kScratchRegister);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS Function.
  __ PushReturnAddressFrom(kScratchRegister);

  // Jump to point after the code-age stub.
  __ ret(0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ Pushad();
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ Popad();
    // Tear down internal frame.
  }

  __ DropUnderReturnAddress(1);  // Ignore state offset
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));

    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));

  // Switch on the state.
  Label not_no_registers, not_tos_rax;
  __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
  __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
  __ j(not_equal, &not_tos_rax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  __ Abort(kNoCasesLeft);
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into rax and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(rax, args.GetReceiverOperand());
    __ JumpIfSmi(rax, &receiver_not_date);
    __ CmpObjectType(rax, JS_DATE_TYPE, rbx);
    __ j(not_equal, &receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    __ movp(rax, FieldOperand(rax, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      Label stamp_mismatch;
      __ Load(rdx, ExternalReference::date_cache_stamp(masm->isolate()));
      __ cmpp(rdx, FieldOperand(rax, JSDate::kCacheStampOffset));
      __ j(not_equal, &stamp_mismatch, Label::kNear);
      __ movp(rax, FieldOperand(
                       rax, JSDate::kValueOffset + field_index * kPointerSize));
      __ ret(1 * kPointerSize);
      __ bind(&stamp_mismatch);
    }
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2);
    __ Move(arg_reg_1, rax);
    __ Move(arg_reg_2, Smi::FromInt(field_index));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ ret(1 * kPointerSize);

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ EnterFrame(StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowNotDateError);
  }
}
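
// Summary of the fast path above: for cached fields (field_index below
// JSDate::kFirstUncachedField) the stub compares the isolate-wide
// date_cache_stamp with the stamp stored in the JSDate and, on a match,
// returns the precomputed field directly; otherwise it falls back to the C++
// get_date_field_function. Getters such as Date.prototype.getFullYear() are
// built on this stub.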


// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : argArray
  //  -- rsp[16] : thisArg
  //  -- rsp[24] : receiver
  // -----------------------------------

  // 1. Load receiver into rdi, argArray into rax (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg_array, no_this_arg;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ movp(rbx, rdx);
    __ movp(rdi, args.GetReceiverOperand());
    __ testp(rax, rax);
    __ j(zero, &no_this_arg, Label::kNear);
    {
      __ movp(rdx, args.GetArgumentOperand(1));
      __ cmpp(rax, Immediate(1));
      __ j(equal, &no_arg_array, Label::kNear);
      __ movp(rbx, args.GetArgumentOperand(2));
      __ bind(&no_arg_array);
    }
    __ bind(&no_this_arg);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // ----------- S t a t e -------------
  //  -- rax     : argArray
  //  -- rdi     : receiver
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(rdi, &receiver_not_callable, Label::kNear);
  __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsCallable));
  __ j(zero, &receiver_not_callable, Label::kNear);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(rax, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
  __ JumpIfRoot(rax, Heap::kUndefinedValueRootIndex, &no_arguments,
                Label::kNear);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ Set(rax, 0);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(args.GetReceiverOperand(), rdi);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
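
// Informally, the stub above is Function.prototype.apply and behaves roughly
// like the following JS-level sketch, where Call and Apply stand for the
// builtins jumped to in the code:
//   function apply(thisArg, argArray) {
//     if (!IsCallable(this)) throw new TypeError();      // steps 2, 4c
//     if (argArray == null) return Call(this, thisArg);  // step 4b: no args
//     return Apply(this, thisArg, argArray);             // step 4a
//   }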


// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // Stack Layout:
  //   rsp[0]           : Return address
  //   rsp[8]           : Argument n
  //   rsp[16]          : Argument n-1
  //   ...
  //   rsp[8 * n]       : Argument 1
  //   rsp[8 * (n + 1)] : Receiver (callable to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ testp(rax, rax);
    __ j(not_zero, &done, Label::kNear);
    __ PopReturnAddressTo(rbx);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(rbx);
    __ incp(rax);
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  {
    StackArgumentsAccessor args(rsp, rax);
    __ movp(rdi, args.GetReceiverOperand());
  }

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  {
    Label loop;
    __ movp(rcx, rax);
    StackArgumentsAccessor args(rsp, rcx);
    __ bind(&loop);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ movp(args.GetArgumentOperand(0), rbx);
    __ decp(rcx);
    __ j(not_zero, &loop);  // While non-zero.
    __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
    __ decp(rax);  // One fewer argument (first argument is new receiver).
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
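
// Informally, the stub above is Function.prototype.call: the callable is the
// original receiver, every argument is shifted down one stack slot so that
// the first argument becomes the new receiver, argc is decremented, and the
// generic Call builtin does the rest.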


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : argc
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : argumentsList
  //  -- rsp[16] : thisArgument
  //  -- rsp[24] : target
  //  -- rsp[32] : receiver
  // -----------------------------------

  // 1. Load target into rdi (if present), argumentsList into rax (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
    __ movp(rdx, rdi);
    __ movp(rbx, rdi);
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ j(equal, &done, Label::kNear);
    __ movp(rdx, args.GetArgumentOperand(2));  // thisArgument
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(3));  // argumentsList
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
    __ movp(rax, rbx);
  }

  // ----------- S t a t e -------------
  //  -- rax     : argumentsList
  //  -- rdi     : target
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(rdi, &target_not_callable, Label::kNear);
  __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsCallable));
  __ j(zero, &target_not_callable, Label::kNear);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    StackArgumentsAccessor args(rsp, 0);
    __ movp(args.GetReceiverOperand(), rdi);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1300
1301
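// Informally, Reflect.construct(target, argumentsList, newTarget = target)
// behaves like the following simplified sketch of ES6 26.1.2:
//
//   if (!IsConstructor(target)) throw TypeError(...);
//   if (!IsConstructor(newTarget)) throw TypeError(...);
//   args = CreateListFromArrayLike(argumentsList);
//   return Construct(target, args, newTarget);
//
// Both constructor checks are done inline below; the argumentsList is
// again expanded by the generic Apply builtin.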
1302void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1303 // ----------- S t a t e -------------
1304 // -- rax : argc
1305 // -- rsp[0] : return address
1306 // -- rsp[8] : new.target (optional)
1307 // -- rsp[16] : argumentsList
1308 // -- rsp[24] : target
1309 // -- rsp[32] : receiver
1310 // -----------------------------------
1311
1312 // 1. Load target into rdi (if present), argumentsList into rax (if present),
1313 // new.target into rdx (if present, otherwise use target), remove all
 1314 // arguments from the stack (including the receiver), and push undefined as
 1315 // the receiver instead.
1316 {
1317 Label done;
1318 StackArgumentsAccessor args(rsp, rax);
1319 __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
1320 __ movp(rdx, rdi);
1321 __ movp(rbx, rdi);
1322 __ cmpp(rax, Immediate(1));
1323 __ j(below, &done, Label::kNear);
1324 __ movp(rdi, args.GetArgumentOperand(1)); // target
1325 __ movp(rdx, rdi); // new.target defaults to target
1326 __ j(equal, &done, Label::kNear);
1327 __ movp(rbx, args.GetArgumentOperand(2)); // argumentsList
1328 __ cmpp(rax, Immediate(3));
1329 __ j(below, &done, Label::kNear);
1330 __ movp(rdx, args.GetArgumentOperand(3)); // new.target
1331 __ bind(&done);
1332 __ PopReturnAddressTo(rcx);
1333 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1334 __ PushRoot(Heap::kUndefinedValueRootIndex);
1335 __ PushReturnAddressFrom(rcx);
1336 __ movp(rax, rbx);
1337 }
1338
1339 // ----------- S t a t e -------------
1340 // -- rax : argumentsList
1341 // -- rdx : new.target
1342 // -- rdi : target
1343 // -- rsp[0] : return address
1344 // -- rsp[8] : receiver (undefined)
1345 // -----------------------------------
1346
1347 // 2. Make sure the target is actually a constructor.
1348 Label target_not_constructor;
1349 __ JumpIfSmi(rdi, &target_not_constructor, Label::kNear);
1350 __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
1351 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1352 Immediate(1 << Map::kIsConstructor));
1353 __ j(zero, &target_not_constructor, Label::kNear);
1354
 1355 // 3. Make sure the new.target is actually a constructor.
1356 Label new_target_not_constructor;
1357 __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
1358 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
1359 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1360 Immediate(1 << Map::kIsConstructor));
1361 __ j(zero, &new_target_not_constructor, Label::kNear);
1362
1363 // 4a. Construct the target with the given new.target and argumentsList.
1364 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1365
1366 // 4b. The target is not a constructor, throw an appropriate TypeError.
1367 __ bind(&target_not_constructor);
1368 {
1369 StackArgumentsAccessor args(rsp, 0);
1370 __ movp(args.GetReceiverOperand(), rdi);
1371 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
1372 }
1373
1374 // 4c. The new.target is not a constructor, throw an appropriate TypeError.
1375 __ bind(&new_target_not_constructor);
1376 {
1377 StackArgumentsAccessor args(rsp, 0);
1378 __ movp(args.GetReceiverOperand(), rdx);
1379 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
1380 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001381}
1382
1383
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001384void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1385 // ----------- S t a t e -------------
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001386 // -- rax : argc
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001387 // -- rsp[0] : return address
1388 // -- rsp[8] : last argument
1389 // -----------------------------------
1390 Label generic_array_code;
1391
1392 // Get the InternalArray function.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001393 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001394
1395 if (FLAG_debug_code) {
1396 // Initial map for the builtin InternalArray functions should be maps.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001397 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001398 // The following smi check will catch both a NULL and a Smi.
1399 STATIC_ASSERT(kSmiTag == 0);
1400 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001401 __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001402 __ CmpObjectType(rbx, MAP_TYPE, rcx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001403 __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001404 }
1405
1406 // Run the native code for the InternalArray function called as a normal
1407 // function.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001408 // This is done by tail calling a stub.
1409 InternalArrayConstructorStub stub(masm->isolate());
1410 __ TailCallStub(&stub);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001411}
1412
1413
1414void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1415 // ----------- S t a t e -------------
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001416 // -- rax : argc
Ben Murdochb0fe1622011-05-05 13:52:32 +01001417 // -- rsp[0] : return address
1418 // -- rsp[8] : last argument
1419 // -----------------------------------
1420 Label generic_array_code;
1421
1422 // Get the Array function.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001423 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rdi);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001424
1425 if (FLAG_debug_code) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001426 // Initial map for the builtin Array functions should be maps.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001427 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001428 // The following smi check will catch both a NULL and a Smi.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001429 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001430 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001431 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001432 __ CmpObjectType(rbx, MAP_TYPE, rcx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001433 __ Check(equal, kUnexpectedInitialMapForArrayFunction);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001434 }
1435
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001436 __ movp(rdx, rdi);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001437 // Run the native code for the Array function called as a normal function.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001438 // This is done by tail calling a stub.
1439 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
1440 ArrayConstructorStub stub(masm->isolate());
1441 __ TailCallStub(&stub);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001442}
1443
1444
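// Informally, calling Number(value) without 'new' (ES6 20.1.1.1 with an
// undefined NewTarget) reduces to:
//
//   if (arguments.length == 0) return +0;
//   return ToNumber(value);
//
// which is the fast path generated below.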
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001445// static
1446void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001447 // ----------- S t a t e -------------
1448 // -- rax : number of arguments
1449 // -- rdi : constructor function
1450 // -- rsp[0] : return address
1451 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1452 // -- rsp[(argc + 1) * 8] : receiver
1453 // -----------------------------------
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001454
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001455 // 1. Load the first argument into rax and get rid of the rest (including the
1456 // receiver).
1457 Label no_arguments;
1458 {
1459 StackArgumentsAccessor args(rsp, rax);
1460 __ testp(rax, rax);
1461 __ j(zero, &no_arguments, Label::kNear);
1462 __ movp(rbx, args.GetArgumentOperand(1));
1463 __ PopReturnAddressTo(rcx);
1464 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1465 __ PushReturnAddressFrom(rcx);
1466 __ movp(rax, rbx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001467 }
1468
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001469 // 2a. Convert the first argument to a number.
1470 ToNumberStub stub(masm->isolate());
1471 __ TailCallStub(&stub);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001472
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001473 // 2b. No arguments, return +0 (already in rax).
1474 __ bind(&no_arguments);
1475 __ ret(1 * kPointerSize);
1476}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001477
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001478
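// Informally, 'new Number(value)' (ES6 20.1.1.1 with NewTarget present)
// wraps ToNumber(value) -- or +0 when there is no argument -- in a JSValue.
// If new.target differs from the Number function itself (e.g. a class
// extending Number), allocation is handed to the runtime so that the
// initial map of new.target is respected.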
1479// static
1480void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001481 // ----------- S t a t e -------------
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001482 // -- rax : number of arguments
1483 // -- rdi : constructor function
1484 // -- rdx : new target
1485 // -- rsp[0] : return address
1486 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1487 // -- rsp[(argc + 1) * 8] : receiver
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001488 // -----------------------------------
1489
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001490 // 1. Make sure we operate in the context of the called function.
1491 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001492
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001493 // 2. Load the first argument into rbx and get rid of the rest (including the
1494 // receiver).
1495 {
1496 StackArgumentsAccessor args(rsp, rax);
1497 Label no_arguments, done;
1498 __ testp(rax, rax);
1499 __ j(zero, &no_arguments, Label::kNear);
1500 __ movp(rbx, args.GetArgumentOperand(1));
1501 __ jmp(&done, Label::kNear);
1502 __ bind(&no_arguments);
1503 __ Move(rbx, Smi::FromInt(0));
1504 __ bind(&done);
1505 __ PopReturnAddressTo(rcx);
1506 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1507 __ PushReturnAddressFrom(rcx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001508 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001509
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001510 // 3. Make sure rbx is a number.
1511 {
1512 Label done_convert;
1513 __ JumpIfSmi(rbx, &done_convert);
1514 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1515 Heap::kHeapNumberMapRootIndex);
1516 __ j(equal, &done_convert);
1517 {
1518 FrameScope scope(masm, StackFrame::INTERNAL);
1519 __ Push(rdx);
1520 __ Push(rdi);
1521 __ Move(rax, rbx);
1522 ToNumberStub stub(masm->isolate());
1523 __ CallStub(&stub);
1524 __ Move(rbx, rax);
1525 __ Pop(rdi);
1526 __ Pop(rdx);
1527 }
1528 __ bind(&done_convert);
1529 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001530
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001531 // 4. Check if new target and constructor differ.
1532 Label new_object;
1533 __ cmpp(rdx, rdi);
1534 __ j(not_equal, &new_object);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001535
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001536 // 5. Allocate a JSValue wrapper for the number.
1537 __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object);
1538 __ Ret();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001539
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001540 // 6. Fallback to the runtime to create new object.
1541 __ bind(&new_object);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001542 {
1543 FrameScope scope(masm, StackFrame::INTERNAL);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001544 __ Push(rbx); // the first argument
1545 __ Push(rdi); // constructor function
1546 __ Push(rdx); // new target
1547 __ CallRuntime(Runtime::kNewObject);
1548 __ Pop(FieldOperand(rax, JSValue::kValueOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001549 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001550 __ Ret();
1551}
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001552
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001553
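// Informally, calling String(value) without 'new' (ES6 21.1.1.1 with an
// undefined NewTarget) reduces to:
//
//   if (arguments.length == 0) return "";
//   if (typeof value == "symbol") return SymbolDescriptiveString(value);
//   return ToString(value);
//
// which is the dispatch generated below.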
1554// static
1555void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
1556 // ----------- S t a t e -------------
1557 // -- rax : number of arguments
1558 // -- rdi : constructor function
1559 // -- rsp[0] : return address
1560 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1561 // -- rsp[(argc + 1) * 8] : receiver
1562 // -----------------------------------
1563
1564 // 1. Load the first argument into rax and get rid of the rest (including the
1565 // receiver).
1566 Label no_arguments;
1567 {
1568 StackArgumentsAccessor args(rsp, rax);
1569 __ testp(rax, rax);
1570 __ j(zero, &no_arguments, Label::kNear);
1571 __ movp(rbx, args.GetArgumentOperand(1));
1572 __ PopReturnAddressTo(rcx);
1573 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1574 __ PushReturnAddressFrom(rcx);
1575 __ movp(rax, rbx);
1576 }
1577
1578 // 2a. At least one argument, return rax if it's a string, otherwise
1579 // dispatch to appropriate conversion.
1580 Label to_string, symbol_descriptive_string;
1581 {
1582 __ JumpIfSmi(rax, &to_string, Label::kNear);
1583 STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
1584 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
1585 __ j(above, &to_string, Label::kNear);
1586 __ j(equal, &symbol_descriptive_string, Label::kNear);
1587 __ Ret();
1588 }
1589
1590 // 2b. No arguments, return the empty string (and pop the receiver).
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001591 __ bind(&no_arguments);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001592 {
1593 __ LoadRoot(rax, Heap::kempty_stringRootIndex);
1594 __ ret(1 * kPointerSize);
1595 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001596
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001597 // 3a. Convert rax to a string.
1598 __ bind(&to_string);
1599 {
1600 ToStringStub stub(masm->isolate());
1601 __ TailCallStub(&stub);
1602 }
1603
1604 // 3b. Convert symbol in rax to a string.
1605 __ bind(&symbol_descriptive_string);
1606 {
1607 __ PopReturnAddressTo(rcx);
1608 __ Push(rax);
1609 __ PushReturnAddressFrom(rcx);
1610 __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
1611 }
1612}
1613
1614
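// Informally, 'new String(value)' (ES6 21.1.1.1 with NewTarget present)
// wraps ToString(value) -- or the empty string when there is no argument --
// in a JSValue. As with Number above, a new.target that differs from the
// String function sends allocation to the runtime.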
1615// static
1616void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
1617 // ----------- S t a t e -------------
1618 // -- rax : number of arguments
1619 // -- rdi : constructor function
1620 // -- rdx : new target
1621 // -- rsp[0] : return address
1622 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1623 // -- rsp[(argc + 1) * 8] : receiver
1624 // -----------------------------------
1625
1626 // 1. Make sure we operate in the context of the called function.
1627 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
1628
1629 // 2. Load the first argument into rbx and get rid of the rest (including the
1630 // receiver).
1631 {
1632 StackArgumentsAccessor args(rsp, rax);
1633 Label no_arguments, done;
1634 __ testp(rax, rax);
1635 __ j(zero, &no_arguments, Label::kNear);
1636 __ movp(rbx, args.GetArgumentOperand(1));
1637 __ jmp(&done, Label::kNear);
1638 __ bind(&no_arguments);
1639 __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
1640 __ bind(&done);
1641 __ PopReturnAddressTo(rcx);
1642 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1643 __ PushReturnAddressFrom(rcx);
1644 }
1645
1646 // 3. Make sure rbx is a string.
1647 {
1648 Label convert, done_convert;
1649 __ JumpIfSmi(rbx, &convert, Label::kNear);
1650 __ CmpObjectType(rbx, FIRST_NONSTRING_TYPE, rcx);
1651 __ j(below, &done_convert);
1652 __ bind(&convert);
1653 {
1654 FrameScope scope(masm, StackFrame::INTERNAL);
1655 ToStringStub stub(masm->isolate());
1656 __ Push(rdx);
1657 __ Push(rdi);
1658 __ Move(rax, rbx);
1659 __ CallStub(&stub);
1660 __ Move(rbx, rax);
1661 __ Pop(rdi);
1662 __ Pop(rdx);
1663 }
1664 __ bind(&done_convert);
1665 }
1666
1667 // 4. Check if new target and constructor differ.
1668 Label new_object;
1669 __ cmpp(rdx, rdi);
1670 __ j(not_equal, &new_object);
1671
1672 // 5. Allocate a JSValue wrapper for the string.
1673 __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object);
1674 __ Ret();
1675
1676 // 6. Fallback to the runtime to create new object.
1677 __ bind(&new_object);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001678 {
1679 FrameScope scope(masm, StackFrame::INTERNAL);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001680 __ Push(rbx); // the first argument
1681 __ Push(rdi); // constructor function
1682 __ Push(rdx); // new target
1683 __ CallRuntime(Runtime::kNewObject);
1684 __ Pop(FieldOperand(rax, JSValue::kValueOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001685 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001686 __ Ret();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001687}
1688
1689
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001690static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
1691 Label* stack_overflow) {
1692 // ----------- S t a t e -------------
1693 // -- rax : actual number of arguments
1694 // -- rbx : expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001695 // -- rdx : new target (passed through to callee)
1696 // -- rdi : function (passed through to callee)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001697 // -----------------------------------
1698 // Check the stack for overflow. We are not trying to catch
1699 // interruptions (e.g. debug break and preemption) here, so the "real stack
1700 // limit" is checked.
1701 Label okay;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001702 __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001703 __ movp(rcx, rsp);
1704 // Make rcx the space we have left. The stack might already be overflowed
1705 // here which will cause rcx to become negative.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001706 __ subp(rcx, r8);
1707 // Make r8 the space we need for the array when it is unrolled onto the
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001708 // stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001709 __ movp(r8, rbx);
1710 __ shlp(r8, Immediate(kPointerSizeLog2));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001711 // Check if the arguments will overflow the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001712 __ cmpp(rcx, r8);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001713 __ j(less_equal, stack_overflow); // Signed comparison.
1714}
1715
1716
Ben Murdochb0fe1622011-05-05 13:52:32 +01001717static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001718 __ pushq(rbp);
1719 __ movp(rbp, rsp);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001720
1721 // Store the arguments adaptor context sentinel.
1722 __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1723
1724 // Push the function on the stack.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001725 __ Push(rdi);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001726
Ben Murdoch257744e2011-11-30 15:57:28 +00001727 // Preserve the number of arguments on the stack. Must preserve rax,
1728 // rbx and rcx because these registers are used when copying the
Ben Murdochb0fe1622011-05-05 13:52:32 +01001729 // arguments and the receiver.
Ben Murdoch257744e2011-11-30 15:57:28 +00001730 __ Integer32ToSmi(r8, rax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001731 __ Push(r8);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001732}
1733
1734
1735static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1736 // Retrieve the number of arguments from the stack. Number is a Smi.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001737 __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001738
1739 // Leave the frame.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001740 __ movp(rsp, rbp);
1741 __ popq(rbp);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001742
1743 // Remove caller arguments from the stack.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001744 __ PopReturnAddressTo(rcx);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001745 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001746 __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
1747 __ PushReturnAddressFrom(rcx);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001748}
1749
1750
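// Rough example of what the adaptor achieves:
//
//   function f(a, b, c) { return arguments.length; }
//   f(1);          // actual == 1, expected == 3
//   f(1, 2, 3, 4); // actual == 4, expected == 3
//
// In the first call the missing parameters are filled with undefined, so
// f sees a == 1, b == undefined, c == undefined. In the second call only
// the expected three are copied for the callee, while the actual count
// recorded in the adaptor frame keeps all four visible to 'arguments'.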
1751void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1752 // ----------- S t a t e -------------
1753 // -- rax : actual number of arguments
1754 // -- rbx : expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001755 // -- rdx : new target (passed through to callee)
1756 // -- rdi : function (passed through to callee)
Ben Murdochb0fe1622011-05-05 13:52:32 +01001757 // -----------------------------------
1758
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001759 Label invoke, dont_adapt_arguments, stack_overflow;
Steve Block44f0eee2011-05-26 01:26:41 +01001760 Counters* counters = masm->isolate()->counters();
1761 __ IncrementCounter(counters->arguments_adaptors(), 1);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001762
1763 Label enough, too_few;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001764 __ cmpp(rax, rbx);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001765 __ j(less, &too_few);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001766 __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001767 __ j(equal, &dont_adapt_arguments);
1768
1769 { // Enough parameters: Actual >= expected.
1770 __ bind(&enough);
1771 EnterArgumentsAdaptorFrame(masm);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001772 ArgumentsAdaptorStackCheck(masm, &stack_overflow);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001773
1774 // Copy receiver and all expected arguments.
1775 const int offset = StandardFrameConstants::kCallerSPOffset;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001776 __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
Ben Murdoch257744e2011-11-30 15:57:28 +00001777 __ Set(r8, -1); // account for receiver
Ben Murdochb0fe1622011-05-05 13:52:32 +01001778
1779 Label copy;
1780 __ bind(&copy);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001781 __ incp(r8);
1782 __ Push(Operand(rax, 0));
1783 __ subp(rax, Immediate(kPointerSize));
1784 __ cmpp(r8, rbx);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001785 __ j(less, &copy);
1786 __ jmp(&invoke);
1787 }
1788
1789 { // Too few parameters: Actual < expected.
1790 __ bind(&too_few);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001791
 1792 // If the function is a strong mode function, we need to throw an error.
1793 Label no_strong_error;
1794 __ movp(kScratchRegister,
1795 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1796 __ testb(FieldOperand(kScratchRegister,
1797 SharedFunctionInfo::kStrongModeByteOffset),
1798 Immediate(1 << SharedFunctionInfo::kStrongModeBitWithinByte));
1799 __ j(equal, &no_strong_error, Label::kNear);
1800
1801 // What we really care about is the required number of arguments.
1802
1803 if (kPointerSize == kInt32Size) {
1804 __ movp(
1805 kScratchRegister,
1806 FieldOperand(kScratchRegister, SharedFunctionInfo::kLengthOffset));
1807 __ SmiToInteger32(kScratchRegister, kScratchRegister);
1808 } else {
1809 // See comment near kLengthOffset in src/objects.h
1810 __ movsxlq(
1811 kScratchRegister,
1812 FieldOperand(kScratchRegister, SharedFunctionInfo::kLengthOffset));
1813 __ shrq(kScratchRegister, Immediate(1));
1814 }
1815
1816 __ cmpp(rax, kScratchRegister);
1817 __ j(greater_equal, &no_strong_error, Label::kNear);
1818
1819 {
1820 FrameScope frame(masm, StackFrame::MANUAL);
1821 EnterArgumentsAdaptorFrame(masm);
1822 __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments);
1823 }
1824
1825 __ bind(&no_strong_error);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001826 EnterArgumentsAdaptorFrame(masm);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001827 ArgumentsAdaptorStackCheck(masm, &stack_overflow);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001828
1829 // Copy receiver and all actual arguments.
1830 const int offset = StandardFrameConstants::kCallerSPOffset;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001831 __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
Ben Murdoch257744e2011-11-30 15:57:28 +00001832 __ Set(r8, -1); // account for receiver
Ben Murdochb0fe1622011-05-05 13:52:32 +01001833
1834 Label copy;
1835 __ bind(&copy);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001836 __ incp(r8);
1837 __ Push(Operand(rdi, 0));
1838 __ subp(rdi, Immediate(kPointerSize));
1839 __ cmpp(r8, rax);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001840 __ j(less, &copy);
1841
1842 // Fill remaining expected arguments with undefined values.
1843 Label fill;
1844 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
1845 __ bind(&fill);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001846 __ incp(r8);
1847 __ Push(kScratchRegister);
1848 __ cmpp(r8, rbx);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001849 __ j(less, &fill);
1850
1851 // Restore function pointer.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001852 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001853 }
1854
1855 // Call the entry point.
1856 __ bind(&invoke);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001857 __ movp(rax, rbx);
1858 // rax : expected number of arguments
1859 // rdx : new target (passed through to callee)
1860 // rdi : function (passed through to callee)
1861 __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
1862 __ call(rcx);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001863
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001864 // Store offset of return address for deoptimizer.
1865 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1866
Ben Murdochb0fe1622011-05-05 13:52:32 +01001867 // Leave frame and return.
1868 LeaveArgumentsAdaptorFrame(masm);
1869 __ ret(0);
1870
1871 // -------------------------------------------
1872 // Dont adapt arguments.
1873 // -------------------------------------------
1874 __ bind(&dont_adapt_arguments);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001875 __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
1876 __ jmp(rcx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001877
1878 __ bind(&stack_overflow);
1879 {
1880 FrameScope frame(masm, StackFrame::MANUAL);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001881 __ CallRuntime(Runtime::kThrowStackOverflow);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001882 __ int3();
1883 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001884}
1885
1886
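// This builtin is the common tail of Function.prototype.apply,
// Reflect.apply and Reflect.construct. Roughly:
//
//   args = CreateListFromArrayLike(argumentsList);  // or a fast path
//   if (new.target === undefined) return Call(target, thisArgument, ...args);
//   return Construct(target, ...args, new.target);
//
// The fast paths below avoid the runtime call for unmodified arguments
// objects and for fast-elements JSArrays.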
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001887// static
1888void Builtins::Generate_Apply(MacroAssembler* masm) {
1889 // ----------- S t a t e -------------
1890 // -- rax : argumentsList
1891 // -- rdi : target
1892 // -- rdx : new.target (checked to be constructor or undefined)
1893 // -- rsp[0] : return address.
1894 // -- rsp[8] : thisArgument
1895 // -----------------------------------
1896
1897 // Create the list of arguments from the array-like argumentsList.
1898 {
1899 Label create_arguments, create_array, create_runtime, done_create;
1900 __ JumpIfSmi(rax, &create_runtime);
1901
1902 // Load the map of argumentsList into rcx.
1903 __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
1904
1905 // Load native context into rbx.
1906 __ movp(rbx, NativeContextOperand());
1907
1908 // Check if argumentsList is an (unmodified) arguments object.
1909 __ cmpp(rcx, ContextOperand(rbx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
1910 __ j(equal, &create_arguments);
1911 __ cmpp(rcx, ContextOperand(rbx, Context::STRICT_ARGUMENTS_MAP_INDEX));
1912 __ j(equal, &create_arguments);
1913
1914 // Check if argumentsList is a fast JSArray.
1915 __ CmpInstanceType(rcx, JS_ARRAY_TYPE);
1916 __ j(equal, &create_array);
1917
1918 // Ask the runtime to create the list (actually a FixedArray).
1919 __ bind(&create_runtime);
1920 {
1921 FrameScope scope(masm, StackFrame::INTERNAL);
1922 __ Push(rdi);
1923 __ Push(rdx);
1924 __ Push(rax);
1925 __ CallRuntime(Runtime::kCreateListFromArrayLike);
1926 __ Pop(rdx);
1927 __ Pop(rdi);
1928 __ SmiToInteger32(rbx, FieldOperand(rax, FixedArray::kLengthOffset));
1929 }
1930 __ jmp(&done_create);
1931
1932 // Try to create the list from an arguments object.
1933 __ bind(&create_arguments);
1934 __ movp(rbx,
1935 FieldOperand(rax, JSObject::kHeaderSize +
1936 Heap::kArgumentsLengthIndex * kPointerSize));
1937 __ movp(rcx, FieldOperand(rax, JSObject::kElementsOffset));
1938 __ cmpp(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
1939 __ j(not_equal, &create_runtime);
1940 __ SmiToInteger32(rbx, rbx);
1941 __ movp(rax, rcx);
1942 __ jmp(&done_create);
1943
1944 // Try to create the list from a JSArray object.
1945 __ bind(&create_array);
1946 __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
1947 __ DecodeField<Map::ElementsKindBits>(rcx);
1948 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
1949 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
1950 STATIC_ASSERT(FAST_ELEMENTS == 2);
1951 __ cmpl(rcx, Immediate(FAST_ELEMENTS));
1952 __ j(above, &create_runtime);
1953 __ cmpl(rcx, Immediate(FAST_HOLEY_SMI_ELEMENTS));
1954 __ j(equal, &create_runtime);
1955 __ SmiToInteger32(rbx, FieldOperand(rax, JSArray::kLengthOffset));
1956 __ movp(rax, FieldOperand(rax, JSArray::kElementsOffset));
1957
1958 __ bind(&done_create);
1959 }
1960
1961 // Check for stack overflow.
1962 {
1963 // Check the stack for overflow. We are not trying to catch interruptions
1964 // (i.e. debug break and preemption) here, so check the "real stack limit".
1965 Label done;
1966 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
1967 __ movp(rcx, rsp);
1968 // Make rcx the space we have left. The stack might already be overflowed
1969 // here which will cause rcx to become negative.
1970 __ subp(rcx, kScratchRegister);
1971 __ sarp(rcx, Immediate(kPointerSizeLog2));
1972 // Check if the arguments will overflow the stack.
1973 __ cmpp(rcx, rbx);
1974 __ j(greater, &done, Label::kNear); // Signed comparison.
1975 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1976 __ bind(&done);
1977 }
1978
1979 // ----------- S t a t e -------------
1980 // -- rdi : target
1981 // -- rax : args (a FixedArray built from argumentsList)
1982 // -- rbx : len (number of elements to push from args)
1983 // -- rdx : new.target (checked to be constructor or undefined)
1984 // -- rsp[0] : return address.
1985 // -- rsp[8] : thisArgument
1986 // -----------------------------------
1987
1988 // Push arguments onto the stack (thisArgument is already on the stack).
1989 {
1990 __ PopReturnAddressTo(r8);
1991 __ Set(rcx, 0);
1992 Label done, loop;
1993 __ bind(&loop);
1994 __ cmpl(rcx, rbx);
1995 __ j(equal, &done, Label::kNear);
1996 __ Push(
1997 FieldOperand(rax, rcx, times_pointer_size, FixedArray::kHeaderSize));
1998 __ incl(rcx);
1999 __ jmp(&loop);
2000 __ bind(&done);
2001 __ PushReturnAddressFrom(r8);
2002 __ Move(rax, rcx);
2003 }
2004
2005 // Dispatch to Call or Construct depending on whether new.target is undefined.
2006 {
2007 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
2008 __ j(equal, masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2009 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2010 }
2011}
2012
2013
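// Receiver conversion in a nutshell (cf. OrdinaryCallBindThis): strict
// mode and native functions take the receiver as it is; for sloppy mode
// functions, null or undefined is replaced by the global proxy and any
// other primitive is boxed via ToObject. The bit tests below read this
// information from the SharedFunctionInfo to pick the cheapest path.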
2014// static
2015void Builtins::Generate_CallFunction(MacroAssembler* masm,
2016 ConvertReceiverMode mode) {
2017 // ----------- S t a t e -------------
2018 // -- rax : the number of arguments (not including the receiver)
2019 // -- rdi : the function to call (checked to be a JSFunction)
2020 // -----------------------------------
2021 StackArgumentsAccessor args(rsp, rax);
2022 __ AssertFunction(rdi);
2023
2024 // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2025 // Check that the function is not a "classConstructor".
2026 Label class_constructor;
2027 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2028 __ testb(FieldOperand(rdx, SharedFunctionInfo::kFunctionKindByteOffset),
2029 Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte));
2030 __ j(not_zero, &class_constructor);
2031
2032 // ----------- S t a t e -------------
2033 // -- rax : the number of arguments (not including the receiver)
2034 // -- rdx : the shared function info.
2035 // -- rdi : the function to call (checked to be a JSFunction)
2036 // -----------------------------------
2037
2038 // Enter the context of the function; ToObject has to run in the function
2039 // context, and we also need to take the global proxy from the function
2040 // context in case of conversion.
2041 STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
2042 SharedFunctionInfo::kStrictModeByteOffset);
2043 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2044 // We need to convert the receiver for non-native sloppy mode functions.
2045 Label done_convert;
2046 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
2047 Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) |
2048 (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
2049 __ j(not_zero, &done_convert);
2050 {
2051 // ----------- S t a t e -------------
2052 // -- rax : the number of arguments (not including the receiver)
2053 // -- rdx : the shared function info.
2054 // -- rdi : the function to call (checked to be a JSFunction)
2055 // -- rsi : the function context.
2056 // -----------------------------------
2057
2058 if (mode == ConvertReceiverMode::kNullOrUndefined) {
2059 // Patch receiver to global proxy.
2060 __ LoadGlobalProxy(rcx);
2061 } else {
2062 Label convert_to_object, convert_receiver;
2063 __ movp(rcx, args.GetReceiverOperand());
2064 __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
2065 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2066 __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
2067 __ j(above_equal, &done_convert);
2068 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2069 Label convert_global_proxy;
2070 __ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex,
2071 &convert_global_proxy, Label::kNear);
2072 __ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object,
2073 Label::kNear);
2074 __ bind(&convert_global_proxy);
2075 {
2076 // Patch receiver to global proxy.
2077 __ LoadGlobalProxy(rcx);
2078 }
2079 __ jmp(&convert_receiver);
2080 }
2081 __ bind(&convert_to_object);
2082 {
2083 // Convert receiver using ToObject.
2084 // TODO(bmeurer): Inline the allocation here to avoid building the frame
2085 // in the fast case? (fall back to AllocateInNewSpace?)
2086 FrameScope scope(masm, StackFrame::INTERNAL);
2087 __ Integer32ToSmi(rax, rax);
2088 __ Push(rax);
2089 __ Push(rdi);
2090 __ movp(rax, rcx);
2091 ToObjectStub stub(masm->isolate());
2092 __ CallStub(&stub);
2093 __ movp(rcx, rax);
2094 __ Pop(rdi);
2095 __ Pop(rax);
2096 __ SmiToInteger32(rax, rax);
2097 }
2098 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2099 __ bind(&convert_receiver);
2100 }
2101 __ movp(args.GetReceiverOperand(), rcx);
2102 }
2103 __ bind(&done_convert);
2104
2105 // ----------- S t a t e -------------
2106 // -- rax : the number of arguments (not including the receiver)
2107 // -- rdx : the shared function info.
2108 // -- rdi : the function to call (checked to be a JSFunction)
2109 // -- rsi : the function context.
2110 // -----------------------------------
2111
2112 __ LoadSharedFunctionInfoSpecialField(
2113 rbx, rdx, SharedFunctionInfo::kFormalParameterCountOffset);
2114 ParameterCount actual(rax);
2115 ParameterCount expected(rbx);
2116
2117 __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION,
2118 CheckDebugStepCallWrapper());
2119
2120 // The function is a "classConstructor", need to raise an exception.
2121 __ bind(&class_constructor);
2122 {
2123 FrameScope frame(masm, StackFrame::INTERNAL);
2124 __ Push(rdi);
2125 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2126 }
2127}
2128
2129
2130namespace {
2131
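// Worked example: given
//
//   var bf = f.bind(thisArg, 1, 2);
//   bf(3);
//
// this helper rewrites the stack so that f eventually receives the
// argument list (1, 2, 3). The bound receiver itself is not handled here;
// Generate_CallBoundFunction patches it in separately.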
2132void Generate_PushBoundArguments(MacroAssembler* masm) {
2133 // ----------- S t a t e -------------
2134 // -- rax : the number of arguments (not including the receiver)
2135 // -- rdx : new.target (only in case of [[Construct]])
2136 // -- rdi : target (checked to be a JSBoundFunction)
2137 // -----------------------------------
2138
2139 // Load [[BoundArguments]] into rcx and length of that into rbx.
2140 Label no_bound_arguments;
2141 __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
2142 __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
2143 __ testl(rbx, rbx);
2144 __ j(zero, &no_bound_arguments);
2145 {
2146 // ----------- S t a t e -------------
2147 // -- rax : the number of arguments (not including the receiver)
2148 // -- rdx : new.target (only in case of [[Construct]])
2149 // -- rdi : target (checked to be a JSBoundFunction)
2150 // -- rcx : the [[BoundArguments]] (implemented as FixedArray)
2151 // -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
2152 // -----------------------------------
2153
2154 // Reserve stack space for the [[BoundArguments]].
2155 {
2156 Label done;
2157 __ leap(kScratchRegister, Operand(rbx, times_pointer_size, 0));
2158 __ subp(rsp, kScratchRegister);
2159 // Check the stack for overflow. We are not trying to catch interruptions
2160 // (i.e. debug break and preemption) here, so check the "real stack
2161 // limit".
2162 __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
2163 __ j(greater, &done, Label::kNear); // Signed comparison.
2164 // Restore the stack pointer.
2165 __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0));
2166 {
2167 FrameScope scope(masm, StackFrame::MANUAL);
2168 __ EnterFrame(StackFrame::INTERNAL);
2169 __ CallRuntime(Runtime::kThrowStackOverflow);
2170 }
2171 __ bind(&done);
2172 }
2173
2174 // Adjust effective number of arguments to include return address.
2175 __ incl(rax);
2176
2177 // Relocate arguments and return address down the stack.
2178 {
2179 Label loop;
2180 __ Set(rcx, 0);
2181 __ leap(rbx, Operand(rsp, rbx, times_pointer_size, 0));
2182 __ bind(&loop);
2183 __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
2184 __ movp(Operand(rsp, rcx, times_pointer_size, 0), kScratchRegister);
2185 __ incl(rcx);
2186 __ cmpl(rcx, rax);
2187 __ j(less, &loop);
2188 }
2189
2190 // Copy [[BoundArguments]] to the stack (below the arguments).
2191 {
2192 Label loop;
2193 __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
2194 __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
2195 __ bind(&loop);
2196 __ decl(rbx);
2197 __ movp(kScratchRegister, FieldOperand(rcx, rbx, times_pointer_size,
2198 FixedArray::kHeaderSize));
2199 __ movp(Operand(rsp, rax, times_pointer_size, 0), kScratchRegister);
2200 __ leal(rax, Operand(rax, 1));
2201 __ j(greater, &loop);
2202 }
2203
2204 // Adjust effective number of arguments (rax contains the number of
2205 // arguments from the call plus return address plus the number of
2206 // [[BoundArguments]]), so we need to subtract one for the return address.
2207 __ decl(rax);
2208 }
2209 __ bind(&no_bound_arguments);
2210}
2211
2212} // namespace
2213
2214
2215// static
2216void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) {
2217 // ----------- S t a t e -------------
2218 // -- rax : the number of arguments (not including the receiver)
2219 // -- rdi : the function to call (checked to be a JSBoundFunction)
2220 // -----------------------------------
2221 __ AssertBoundFunction(rdi);
2222
2223 // Patch the receiver to [[BoundThis]].
2224 StackArgumentsAccessor args(rsp, rax);
2225 __ movp(rbx, FieldOperand(rdi, JSBoundFunction::kBoundThisOffset));
2226 __ movp(args.GetReceiverOperand(), rbx);
2227
2228 // Push the [[BoundArguments]] onto the stack.
2229 Generate_PushBoundArguments(masm);
2230
2231 // Call the [[BoundTargetFunction]] via the Call builtin.
2232 __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2233 __ Load(rcx,
2234 ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()));
2235 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
2236 __ jmp(rcx);
2237}
2238
2239
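// Generic [[Call]] dispatch, roughly: JSFunctions go to CallFunction,
// bound functions unwrap their [[BoundTargetFunction]], proxies defer to
// the runtime, any other object whose map has the callable bit set is
// routed through the CALL_AS_FUNCTION_DELEGATE, and everything else
// throws a TypeError.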
2240// static
2241void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
2242 // ----------- S t a t e -------------
2243 // -- rax : the number of arguments (not including the receiver)
2244 // -- rdi : the target to call (can be any Object)
2245 // -----------------------------------
2246 StackArgumentsAccessor args(rsp, rax);
2247
2248 Label non_callable, non_function, non_smi;
2249 __ JumpIfSmi(rdi, &non_callable);
2250 __ bind(&non_smi);
2251 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2252 __ j(equal, masm->isolate()->builtins()->CallFunction(mode),
2253 RelocInfo::CODE_TARGET);
2254 __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
2255 __ j(equal, masm->isolate()->builtins()->CallBoundFunction(),
2256 RelocInfo::CODE_TARGET);
2257 __ CmpInstanceType(rcx, JS_PROXY_TYPE);
2258 __ j(not_equal, &non_function);
2259
2260 // 1. Runtime fallback for Proxy [[Call]].
2261 __ PopReturnAddressTo(kScratchRegister);
2262 __ Push(rdi);
2263 __ PushReturnAddressFrom(kScratchRegister);
2264 // Increase the arguments size to include the pushed function and the
2265 // existing receiver on the stack.
2266 __ addp(rax, Immediate(2));
2267 // Tail-call to the runtime.
2268 __ JumpToExternalReference(
2269 ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2270
2271 // 2. Call to something else, which might have a [[Call]] internal method (if
2272 // not we raise an exception).
2273 __ bind(&non_function);
2274 // Check if target has a [[Call]] internal method.
2275 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2276 Immediate(1 << Map::kIsCallable));
2277 __ j(zero, &non_callable, Label::kNear);
2278 // Overwrite the original receiver with the (original) target.
2279 __ movp(args.GetReceiverOperand(), rdi);
2280 // Let the "call_as_function_delegate" take care of the rest.
2281 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
2282 __ Jump(masm->isolate()->builtins()->CallFunction(
2283 ConvertReceiverMode::kNotNullOrUndefined),
2284 RelocInfo::CODE_TARGET);
2285
2286 // 3. Call to something that is not callable.
2287 __ bind(&non_callable);
2288 {
2289 FrameScope scope(masm, StackFrame::INTERNAL);
2290 __ Push(rdi);
2291 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2292 }
2293}
2294
2295
2296// static
2297void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2298 // ----------- S t a t e -------------
2299 // -- rax : the number of arguments (not including the receiver)
2300 // -- rdx : the new target (checked to be a constructor)
2301 // -- rdi : the constructor to call (checked to be a JSFunction)
2302 // -----------------------------------
2303 __ AssertFunction(rdi);
2304
2305 // Calling convention for function specific ConstructStubs require
2306 // rbx to contain either an AllocationSite or undefined.
2307 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
2308
2309 // Tail call to the function-specific construct stub (still in the caller
2310 // context at this point).
2311 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2312 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
2313 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
2314 __ jmp(rcx);
2315}
2316
2317
2318// static
2319void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2320 // ----------- S t a t e -------------
2321 // -- rax : the number of arguments (not including the receiver)
2322 // -- rdx : the new target (checked to be a constructor)
2323 // -- rdi : the constructor to call (checked to be a JSBoundFunction)
2324 // -----------------------------------
2325 __ AssertBoundFunction(rdi);
2326
2327 // Push the [[BoundArguments]] onto the stack.
2328 Generate_PushBoundArguments(masm);
2329
2330 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2331 {
2332 Label done;
2333 __ cmpp(rdi, rdx);
2334 __ j(not_equal, &done, Label::kNear);
2335 __ movp(rdx,
2336 FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2337 __ bind(&done);
2338 }
2339
2340 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2341 __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2342 __ Load(rcx, ExternalReference(Builtins::kConstruct, masm->isolate()));
2343 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
2344 __ jmp(rcx);
2345}
2346
2347
2348// static
2349void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
2350 // ----------- S t a t e -------------
2351 // -- rax : the number of arguments (not including the receiver)
2352 // -- rdi : the constructor to call (checked to be a JSProxy)
2353 // -- rdx : the new target (either the same as the constructor or
2354 // the JSFunction on which new was invoked initially)
2355 // -----------------------------------
2356
2357 // Call into the Runtime for Proxy [[Construct]].
2358 __ PopReturnAddressTo(kScratchRegister);
2359 __ Push(rdi);
2360 __ Push(rdx);
2361 __ PushReturnAddressFrom(kScratchRegister);
2362 // Include the pushed new_target, constructor and the receiver.
2363 __ addp(rax, Immediate(3));
2364 __ JumpToExternalReference(
2365 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
2366}
2367
2368
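// Generic [[Construct]] dispatch, roughly: JSFunctions use their
// function-specific construct stub, bound functions unwrap their
// [[BoundTargetFunction]], proxies defer to the runtime, any other object
// whose map has the constructor bit set is routed through the
// CALL_AS_CONSTRUCTOR_DELEGATE, and non-constructors throw a TypeError.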
2369// static
2370void Builtins::Generate_Construct(MacroAssembler* masm) {
2371 // ----------- S t a t e -------------
2372 // -- rax : the number of arguments (not including the receiver)
2373 // -- rdx : the new target (either the same as the constructor or
2374 // the JSFunction on which new was invoked initially)
2375 // -- rdi : the constructor to call (can be any Object)
2376 // -----------------------------------
2377 StackArgumentsAccessor args(rsp, rax);
2378
2379 // Check if target is a Smi.
2380 Label non_constructor;
2381 __ JumpIfSmi(rdi, &non_constructor, Label::kNear);
2382
2383 // Dispatch based on instance type.
2384 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2385 __ j(equal, masm->isolate()->builtins()->ConstructFunction(),
2386 RelocInfo::CODE_TARGET);
2387
2388 // Check if target has a [[Construct]] internal method.
2389 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2390 Immediate(1 << Map::kIsConstructor));
2391 __ j(zero, &non_constructor, Label::kNear);
2392
2393 // Only dispatch to bound functions after checking whether they are
2394 // constructors.
2395 __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
2396 __ j(equal, masm->isolate()->builtins()->ConstructBoundFunction(),
2397 RelocInfo::CODE_TARGET);
2398
2399 // Only dispatch to proxies after checking whether they are constructors.
2400 __ CmpInstanceType(rcx, JS_PROXY_TYPE);
2401 __ j(equal, masm->isolate()->builtins()->ConstructProxy(),
2402 RelocInfo::CODE_TARGET);
2403
2404 // Called Construct on an exotic Object with a [[Construct]] internal method.
2405 {
2406 // Overwrite the original receiver with the (original) target.
2407 __ movp(args.GetReceiverOperand(), rdi);
2408 // Let the "call_as_constructor_delegate" take care of the rest.
2409 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
2410 __ Jump(masm->isolate()->builtins()->CallFunction(),
2411 RelocInfo::CODE_TARGET);
2412 }
2413
2414 // Called Construct on an Object that doesn't have a [[Construct]] internal
2415 // method.
2416 __ bind(&non_constructor);
2417 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
2418 RelocInfo::CODE_TARGET);
2419}
2420
2421
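// Informal pseudocode for the signature check below:
//
//   if (info->signature() is undefined) return compatible;
//   for (map = receiver->map(); ; map = receiver->map()) {
//     if (map's constructor is a JSFunction) {
//       for (type = constructor->shared()->function_data();
//            type is a FunctionTemplateInfo;
//            type = type->parent_template()) {
//         if (type == info->signature()) return compatible;
//       }
//     }
//     receiver = map->prototype();
//     if (receiver is null or not a hidden prototype) return incompatible;
//   }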
2422static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
2423 Register function_template_info,
2424 Register scratch0, Register scratch1,
2425 Register scratch2,
2426 Label* receiver_check_failed) {
2427 Register signature = scratch0;
2428 Register map = scratch1;
2429 Register constructor = scratch2;
2430
2431 // If there is no signature, return the holder.
2432 __ movp(signature, FieldOperand(function_template_info,
2433 FunctionTemplateInfo::kSignatureOffset));
2434 __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
2435 Label receiver_check_passed;
2436 __ j(equal, &receiver_check_passed, Label::kNear);
2437
2438 // Walk the prototype chain.
2439 __ movp(map, FieldOperand(receiver, HeapObject::kMapOffset));
2440 Label prototype_loop_start;
2441 __ bind(&prototype_loop_start);
2442
2443 // Get the constructor, if any.
2444 __ GetMapConstructor(constructor, map, kScratchRegister);
2445 __ CmpInstanceType(kScratchRegister, JS_FUNCTION_TYPE);
2446 Label next_prototype;
2447 __ j(not_equal, &next_prototype, Label::kNear);
2448
2449 // Get the constructor's signature.
2450 Register type = constructor;
2451 __ movp(type,
2452 FieldOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
2453 __ movp(type, FieldOperand(type, SharedFunctionInfo::kFunctionDataOffset));
2454
2455 // Loop through the chain of inheriting function templates.
2456 Label function_template_loop;
2457 __ bind(&function_template_loop);
2458
2459 // If the signatures match, we have a compatible receiver.
2460 __ cmpp(signature, type);
2461 __ j(equal, &receiver_check_passed, Label::kNear);
2462
2463 // If the current type is not a FunctionTemplateInfo, load the next prototype
2464 // in the chain.
2465 __ JumpIfSmi(type, &next_prototype, Label::kNear);
2466 __ CmpObjectType(type, FUNCTION_TEMPLATE_INFO_TYPE, kScratchRegister);
2467 __ j(not_equal, &next_prototype, Label::kNear);
2468
2469 // Otherwise load the parent function template and iterate.
2470 __ movp(type,
2471 FieldOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
2472 __ jmp(&function_template_loop, Label::kNear);
2473
2474 // Load the next prototype.
2475 __ bind(&next_prototype);
2476 __ movp(receiver, FieldOperand(map, Map::kPrototypeOffset));
2477 // End if the prototype is null or not hidden.
2478 __ CompareRoot(receiver, Heap::kNullValueRootIndex);
2479 __ j(equal, receiver_check_failed);
2480 __ movp(map, FieldOperand(receiver, HeapObject::kMapOffset));
2481 __ testq(FieldOperand(map, Map::kBitField3Offset),
2482 Immediate(Map::IsHiddenPrototype::kMask));
2483 __ j(zero, receiver_check_failed);
2484 // Iterate.
2485 __ jmp(&prototype_loop_start, Label::kNear);
2486
2487 __ bind(&receiver_check_passed);
2488}
2489
2490
2491void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
2492 // ----------- S t a t e -------------
2493 // -- rax : number of arguments (not including the receiver)
2494 // -- rdi : callee
2495 // -- rsi : context
2496 // -- rsp[0] : return address
2497 // -- rsp[8] : last argument
2498 // -- ...
2499 // -- rsp[rax * 8] : first argument
2500 // -- rsp[(rax + 1) * 8] : receiver
2501 // -----------------------------------
2502
2503 StackArgumentsAccessor args(rsp, rax);
2504
2505 // Load the FunctionTemplateInfo.
2506 __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2507 __ movp(rbx, FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset));
2508
2509 // Do the compatible receiver check.
2510 Label receiver_check_failed;
2511 __ movp(rcx, args.GetReceiverOperand());
2512 CompatibleReceiverCheck(masm, rcx, rbx, rdx, r8, r9, &receiver_check_failed);
2513
2514 // Get the callback offset from the FunctionTemplateInfo, and jump to the
2515 // beginning of the code.
2516 __ movp(rdx, FieldOperand(rbx, FunctionTemplateInfo::kCallCodeOffset));
2517 __ movp(rdx, FieldOperand(rdx, CallHandlerInfo::kFastHandlerOffset));
2518 __ addp(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2519 __ jmp(rdx);
2520
2521 // Compatible receiver check failed: pop return address, arguments and
2522 // receiver and throw an Illegal Invocation exception.
2523 __ bind(&receiver_check_failed);
2524 __ PopReturnAddressTo(rbx);
2525 __ leap(rax, Operand(rax, times_pointer_size, 1 * kPointerSize));
2526 __ addp(rsp, rax);
2527 __ PushReturnAddressFrom(rbx);
2528 {
2529 FrameScope scope(masm, StackFrame::INTERNAL);
2530 __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
2531 }
2532}
2533
2534
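// On-stack replacement in brief: ask the runtime to compile optimized
// code for the function in the current JavaScript frame. If that yields a
// code object, read the OSR entry offset from its deoptimization data,
// replace the return address with the code entry plus that offset, and
// "return" straight into the optimized code; a null result simply resumes
// the unoptimized code.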
Ben Murdochb0fe1622011-05-05 13:52:32 +01002535void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002536 // Lookup the function in the JavaScript frame.
2537 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002538 {
2539 FrameScope scope(masm, StackFrame::INTERNAL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002540 // Pass function as argument.
2541 __ Push(rax);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002542 __ CallRuntime(Runtime::kCompileForOnStackReplacement);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002543 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002544
Ben Murdoch257744e2011-11-30 15:57:28 +00002545 Label skip;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002546 // If the code object is null, just return to the unoptimized code.
2547 __ cmpp(rax, Immediate(0));
Ben Murdoch257744e2011-11-30 15:57:28 +00002548 __ j(not_equal, &skip, Label::kNear);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002549 __ ret(0);
2550
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002551 __ bind(&skip);
2552
2553 // Load deoptimization data from the code object.
2554 __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
2555
2556 // Load the OSR entrypoint offset from the deoptimization data.
2557 __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
2558 DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
2559
2560 // Compute the target address = code_obj + header_size + osr_offset
2561 __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));
2562
2563 // Overwrite the return address on the stack.
2564 __ movq(StackOperandForReturnAddress(0), rax);
2565
2566 // And "return" to the OSR entry point of the function.
2567 __ ret(0);
2568}
2569
2570
2571void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
2572 // We check the stack limit as indicator that recompilation might be done.
Ben Murdoch257744e2011-11-30 15:57:28 +00002573 Label ok;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002574 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002575 __ j(above_equal, &ok);
2576 {
2577 FrameScope scope(masm, StackFrame::INTERNAL);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002578 __ CallRuntime(Runtime::kStackGuard);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002579 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002580 __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
2581 RelocInfo::CODE_TARGET);
2582
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002583 __ bind(&ok);
2584 __ ret(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002585}
2586
2587
2588#undef __
2589
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002590} // namespace internal
2591} // namespace v8
Leon Clarkef7060e22010-06-03 12:02:55 +01002592
2593#endif // V8_TARGET_ARCH_X64