// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_S390

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments excluding receiver
  //  -- r3 : target
  //  -- r5 : new.target
  //  -- sp[0] : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc] : receiver
  // -----------------------------------
  __ AssertFunction(r3);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));

  // Insert extra arguments.
  int num_extra_args = 0;
  switch (extra_args) {
    case BuiltinExtraArguments::kTarget:
      __ Push(r3);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kNewTarget:
      __ Push(r5);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kTargetAndNewTarget:
      __ Push(r3, r5);
      num_extra_args += 2;
      break;
    case BuiltinExtraArguments::kNone:
      break;
  }
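
  // For example, a call that passes two explicit arguments with
  // BuiltinExtraArguments::kTargetAndNewTarget leaves
  // 2 (arguments) + 2 (extra) + 1 (receiver) = 5 in r2 below.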

  // JumpToExternalReference expects r2 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ AddP(r2, r2, Operand(num_extra_args + 1));

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments
  //  -- lr : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r3);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r4);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
    __ CompareObjectType(r4, r5, r6, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments
  //  -- lr : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r3);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r4);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r4, r5, r6, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ LoadRR(r5, r3);
  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments
  //  -- lr : return address
  //  -- sp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- sp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Condition const cond_done = (kind == MathMaxMinKind::kMin) ? lt : gt;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r3 and the double value in d1.
  __ LoadRoot(r3, root_index);
  __ LoadDouble(d1, FieldMemOperand(r3, HeapNumber::kValueOffset));

  // Set up the state for the loop:
  // r4: address of arg[0] + kPointerSize
  // r5: number of slots to drop at exit (arguments + receiver)
  __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
  __ AddP(r4, sp, r4);
  __ AddP(r5, r2, Operand(1));

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ CmpLogicalP(r4, sp);
    __ ble(&done_loop);

    // Load the next parameter tagged value into r2.
    __ lay(r4, MemOperand(r4, -kPointerSize));
    __ LoadP(r2, MemOperand(r4));

    // Load the double value of the parameter into d2, converting the
    // parameter to a number first using the ToNumberStub if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r2, &convert_smi);
    __ LoadP(r6, FieldMemOperand(r2, HeapObject::kMapOffset));
    __ JumpIfRoot(r6, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumberStub to convert it.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r5);
      __ Push(r3, r4, r5);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Pop(r3, r4, r5);
      __ SmiUntag(r5);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
        __ SmiToDouble(d1, r3);
        __ JumpIfSmi(r3, &done_restore);
        __ LoadDouble(d1, FieldMemOperand(r3, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ LoadDouble(d2, FieldMemOperand(r2, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r2);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left
    // hand side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ cdbr(d1, d2);
    __ bunordered(&compare_nan);
    __ b(cond_done, &loop);
    __ b(CommuteCondition(cond_done), &compare_swap);
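    // If the accumulator (d1) already wins, keep it and continue the loop;
    // if the parameter (d2) wins, take it via compare_swap. Equal values
    // fall through to the -0 check below, where |reg| (d2 for Math.min, d1
    // for Math.max) is the value whose sign decides whether to swap.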

    // Left and right hand side are equal, check for -0 vs. +0.
    __ TestDoubleIsMinusZero(reg, r6, r7);
    __ bne(&loop);

    // Update accumulator. Result is on the right hand side.
    __ bind(&compare_swap);
    __ ldr(d1, d2);
    __ LoadRR(r3, r2);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    // We still need to visit the rest of the arguments.
    __ bind(&compare_nan);
    __ LoadRoot(r3, Heap::kNanValueRootIndex);
    __ LoadDouble(d1, FieldMemOperand(r3, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  __ LoadRR(r2, r3);
  __ Drop(r5);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments
  //  -- r3 : constructor function
  //  -- lr : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4] : receiver
  // -----------------------------------

  // 1. Load the first argument into r2 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r2, r2, Operand(kPointerSizeLog2));
    __ la(sp, MemOperand(sp, r2));
    __ LoadP(r2, MemOperand(sp));
    __ Drop(2);
  }

  // 2a. Convert the first argument to a number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ LoadSmiLiteral(r2, Smi::FromInt(0));
  __ Ret(1);
}

// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments
  //  -- r3 : constructor function
  //  -- r5 : new target
  //  -- lr : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));

  // 2. Load the first argument into r4 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
    __ la(sp, MemOperand(sp, r4));
    __ LoadP(r4, MemOperand(sp));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadSmiLiteral(r4, Smi::FromInt(0));
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r4 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r4, &done_convert);
    __ CompareObjectType(r4, r6, r6, HEAP_NUMBER_TYPE);
    __ beq(&done_convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r3, r5);
      __ LoadRR(r2, r4);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ LoadRR(r4, r2);
      __ Pop(r3, r5);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ CmpP(r3, r5);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r2, r3, r4, r6, r7, &new_object);
  __ Ret();
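  // Note that new_object is also the label AllocateJSValue bails out to, so
  // the runtime path below handles a failed inline allocation as well as a
  // differing new.target.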

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r4);
  }
  __ StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset), r0);
  __ Ret();
}

// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments
  //  -- r3 : constructor function
  //  -- lr : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4] : receiver
  // -----------------------------------
  // 1. Load the first argument into r2 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r2, r2, Operand(kPointerSizeLog2));
    __ lay(sp, MemOperand(sp, r2));
    __ LoadP(r2, MemOperand(sp));
    __ Drop(2);
  }

  // 2a. At least one argument, return r2 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r2, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
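    // Since symbols immediately follow strings in the instance type
    // enumeration, a single comparison classifies all three cases: below
    // (a string, just return it), equal (a symbol), or above (convert).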
    __ CompareObjectType(r2, r3, r3, FIRST_NONSTRING_TYPE);
    __ bgt(&to_string);
    __ beq(&symbol_descriptive_string);
    __ Ret();
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r2, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r2 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }
  // 3b. Convert symbol in r2 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(r2);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}

// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments
  //  -- r3 : constructor function
  //  -- r5 : new target
  //  -- lr : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));

  // 2. Load the first argument into r4 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
    __ lay(sp, MemOperand(sp, r4));
    __ LoadP(r4, MemOperand(sp));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r4, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r4 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r4, &convert);
    __ CompareObjectType(r4, r6, r6, FIRST_NONSTRING_TYPE);
    __ blt(&done_convert);
    __ bind(&convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(r3, r5);
      __ LoadRR(r2, r4);
      __ CallStub(&stub);
      __ LoadRR(r4, r2);
      __ Pop(r3, r5);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ CmpP(r3, r5);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r2, r3, r4, r6, r7, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r4);
  }
  __ StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset), r0);
  __ Ret();
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
  __ AddP(ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r2 : argument count (preserved for callee)
  //  -- r3 : target function (preserved for callee)
  //  -- r5 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(r2);
    __ Push(r2, r3, r5, r3);

    __ CallRuntime(function_id, 1);
    __ LoadRR(r4, r2);

    // Restore target function and new target.
    __ Pop(r2, r3, r5);
    __ SmiUntag(r2);
  }
  __ AddP(ip, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CmpLogicalP(sp, RootMemOperand(Heap::kStackLimitRootIndex));
  __ bge(&ok, Label::kNear);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments
  //  -- r3 : constructor function
  //  -- r4 : allocation site or undefined
  //  -- r5 : new target
  //  -- cp : context
  //  -- lr : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r4, r6);

    if (!create_implicit_receiver) {
      __ SmiTag(r6, r2);
      __ LoadAndTestP(r6, r6);
      __ Push(cp, r4, r6);
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    } else {
      __ SmiTag(r2);
      __ Push(cp, r4, r2);

      // Allocate the new receiver object.
      __ Push(r3, r5);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ LoadRR(r6, r2);
      __ Pop(r3, r5);

      // ----------- S t a t e -------------
      //  -- r3: constructor function
      //  -- r5: new target
      //  -- r6: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ LoadP(r2, MemOperand(sp));
      __ SmiUntag(r2);
      __ LoadAndTestP(r2, r2);

      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(r6, r6);
    }

    // Set up pointer to last argument.
    __ la(r4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r2: number of arguments
    // r3: constructor function
    // r4: address of last argument (caller sp)
    // r5: new target
    // cr0: condition indicating whether r2 is zero
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ beq(&no_args);
    __ ShiftLeftP(ip, r2, Operand(kPointerSizeLog2));
    __ SubP(sp, sp, ip);
    __ LoadRR(r1, r2);
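    // ip holds the total byte size of the arguments and is reused as a
    // decreasing offset that indexes both the source area (base r4, the
    // caller's arguments) and the destination area (base sp, the slots just
    // reserved); r1 counts the iterations.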
    __ bind(&loop);
    __ lay(ip, MemOperand(ip, -kPointerSize));
    __ LoadP(r0, MemOperand(ip, r4));
    __ StoreP(r0, MemOperand(ip, sp));
    __ BranchOnCount(r1, &loop);
    __ bind(&no_args);

    // Call the function.
    // r2: number of arguments
    // r3: constructor function
    // r5: new target
    if (is_api_function) {
      __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
      Handle<Code> code = masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(r2);
      __ InvokeFunction(r3, r5, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r2: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r2: result
      // sp[0]: receiver
      // sp[1]: new.target
      // sp[2]: number of arguments (smi-tagged)
      __ JumpIfSmi(r2, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r2, r3, r5, FIRST_JS_RECEIVER_TYPE);
      __ bge(&exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ LoadP(r2, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r2: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ LoadP(r3, MemOperand(sp));
    }
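    // In either case r3 now holds the smi-tagged argument count; it is used
    // below, after leaving the construct frame, to drop the arguments.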

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r2, &dont_throw);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ SmiToPtrArrayOffset(r3, r3);
  __ AddP(sp, sp, r3);
  __ AddP(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r3, r4);
  }
  __ Ret();
}

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ push(r3);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

// Clobbers r4; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r4, Heap::kRealStackLimitRootIndex);
  // Make r4 the space we have left. The stack might already be overflowed
  // here which will cause r4 to become negative.
  __ SubP(r4, sp, r4);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiToPtrArrayOffset(r0, argc);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ ShiftLeftP(r0, argc, Operand(kPointerSizeLog2));
  }
  __ CmpP(r4, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r2: new.target
  // r3: function
  // r4: receiver
  // r5: argc
  // r6: argv
  // r0,r7-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    // FrameScope ends up calling MacroAssembler::EnterFrame here
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ LoadP(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r3, r4);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r4.
    Generate_CheckStackOverflow(masm, r5, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop from argv to sp.
    // The arguments are placed on the stack in reverse order compared to
    // argv (i.e. arg1 ends up at the highest stack address).
    // r3: function
    // r5: argc
    // r6: argv, i.e. points to first arg
    // r7: scratch reg to hold scaled argc
    // r8: scratch reg to hold arg handle
    // r9: scratch reg to hold index into argv
    Label argLoop, argExit;
    intptr_t zero = 0;
    __ ShiftLeftP(r7, r5, Operand(kPointerSizeLog2));
    __ SubRR(sp, r7);                // Buy the stack frame to fit args
    __ LoadImmP(r9, Operand(zero));  // Initialize argv index
    __ bind(&argLoop);
    __ CmpPH(r7, Operand(zero));
    __ beq(&argExit, Label::kNear);
    __ lay(r7, MemOperand(r7, -kPointerSize));
    __ LoadP(r8, MemOperand(r9, r6));         // read next parameter
    __ la(r9, MemOperand(r9, kPointerSize));  // r9++;
    __ LoadP(r0, MemOperand(r8));             // dereference handle
    __ StoreP(r0, MemOperand(r7, sp));        // push parameter
    __ b(&argLoop);
    __ bind(&argExit);

    // Set up new.target and argc.
    __ LoadRR(r6, r2);
    __ LoadRR(r2, r5);
    __ LoadRR(r5, r6);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ LoadRR(r7, r6);
    __ LoadRR(r8, r6);
    __ LoadRR(r9, r6);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame, remove the parameters (except the function), and
    // return.
  }
  __ b(r14);

  // r2: result
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r3: the JS function object being called.
//   o r5: the new target
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(r3);

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ LoadP(r2, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  Label array_done;
  Register debug_info = r4;
  DCHECK(!debug_info.is(r2));
  __ LoadP(debug_info,
           FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
  // Load original bytecode array or the debug copy.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r2, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpSmiLiteral(debug_info, DebugInfo::uninitialized(), r0);
  __ beq(&array_done);
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ bind(&array_done);

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r2, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Push new.target, bytecode array and zero for bytecode array offset.
  __ LoadImmP(r2, Operand::Zero());
  __ Push(r5, kInterpreterBytecodeArrayRegister, r2);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ LoadlW(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                  BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ SubP(r5, sp, r4);
    __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
    __ CmpLogicalP(r5, r0);
    __ bge(&ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop, no_args;
    __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
    __ ShiftRightP(r4, r4, Operand(kPointerSizeLog2));
    __ LoadAndTestP(r4, r4);
    __ beq(&no_args);
    __ LoadRR(r1, r4);
    __ bind(&loop);
    __ push(r5);
    __ SubP(r1, Operand(1));
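    // SubP sets the condition code, so the bne below falls through once r1
    // reaches zero.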
    __ bne(&loop);
    __ bind(&no_args);
  }

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Code aging of the BytecodeArray object.

  // Load accumulator, register file, bytecode offset, dispatch table into
  // registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ AddP(kInterpreterRegisterFileRegister, fp,
          Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
                           kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2));
  __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  // TODO(rmcilroy): Make dispatch table point to code entries to avoid
  // untagging and header removal.
  __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(ip);

  // Even though the first bytecode handler was called, we will never return.
  __ Abort(kUnexpectedReturnFromBytecodeHandler);
}

void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in accumulator, which is already in r2.

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments and return.
  __ LoadlW(r0, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                BytecodeArray::kParameterSizeOffset));
  __ AddP(sp, sp, r0);
  __ Ret();
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
                                         Register count, Register scratch) {
  Label loop;
  // Bias the index up one slot; each loop iteration below loads at
  // index - kPointerSize before decrementing the index.
  __ AddP(index, index, Operand(kPointerSize));
  __ LoadRR(r0, count);
  __ bind(&loop);
  __ LoadP(scratch, MemOperand(index, -kPointerSize));
  __ lay(index, MemOperand(index, -kPointerSize));
  __ push(scratch);
  __ SubP(r0, Operand(1));
  __ bne(&loop);
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r4 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r3 : the target to call (can be any Object).
  // -----------------------------------

  // Calculate number of arguments (AddP one for receiver).
  __ AddP(r5, r2, Operand(1));

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r4, r5, r6);

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : argument count (not including receiver)
  //  -- r5 : new target
  //  -- r3 : constructor to call
  //  -- r4 : address of the first argument
  // -----------------------------------

  // Push a slot for the receiver to be constructed.
  __ LoadImmP(r0, Operand::Zero());
  __ push(r0);

  // Push the arguments (skip if none).
  Label skip;
  __ CmpP(r2, Operand::Zero());
  __ beq(&skip);
  Generate_InterpreterPushArgs(masm, r4, r2, r6);
  __ bind(&skip);

  // Call the constructor with r2, r3, and r5 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) {
  // Initialize register file register and dispatch table register.
  __ AddP(kInterpreterRegisterFileRegister, fp,
          Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the context from the frame.
  __ LoadP(kContextRegister,
           MemOperand(kInterpreterRegisterFileRegister,
                      InterpreterFrameConstants::kContextFromRegisterPointer));

  // Get the bytecode array pointer from the frame.
  __ LoadP(
      kInterpreterBytecodeArrayRegister,
      MemOperand(kInterpreterRegisterFileRegister,
                 InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(
               kInterpreterRegisterFileRegister,
               InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
                           kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2));
  __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(ip);
}

static void Generate_InterpreterNotifyDeoptimizedHelper(
    MacroAssembler* masm, Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
    __ Push(r3);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // Drop the state (it is not used for interpreter deopts) and pop the
  // accumulator value into the accumulator register.
  __ Drop(1);
  __ Pop(kInterpreterAccumulatorRegister);

  // Enter the bytecode dispatch.
  Generate_EnterBytecodeDispatch(masm);
}

void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}

void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}

void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the address of the interpreter entry trampoline as a return address.
  // This simulates the initial call to bytecode handlers in interpreter entry
  // trampoline. The return will never actually be taken, but our stack walker
  // uses this address to determine whether a frame is interpreted.
  __ mov(r14,
         Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));

  Generate_EnterBytecodeDispatch(masm);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}

void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}

static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r2 at the start of the PlatformCodeAge sequence.
  __ CleanseP(r14);
  __ SubP(r14, Operand(kCodeAgingSequenceLength));
  __ LoadRR(r2, r14);

  __ pop(r14);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r2 - contains return address (beginning of patch sequence)
  //   r3 - isolate
  //   r5 - new target
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r4);
  __ mov(r3, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
  __ LoadRR(ip, r2);
  __ Jump(ip);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR

void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r2 at the start of the PlatformCodeAge sequence.
  __ CleanseP(r14);
  __ SubP(r14, Operand(kCodeAgingSequenceLength));
  __ LoadRR(r2, r14);

  __ pop(r14);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r2 - contains return address (beginning of patch sequence)
  //   r3 - isolate
  //   r5 - new target
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r4);
  __ mov(r3, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
  __ LoadRR(ip, r2);

  // Perform prologue operations usually performed by the young code stub.
  __ PushStandardFrame(r3);

  // Jump to point after the code-age stub.
  __ AddP(r2, ip, Operand(kNoCodeAgeSequenceLength));
  __ Jump(r2);
}

void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}

void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}

static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ la(sp, MemOperand(sp, kPointerSize));  // Ignore state
  __ Ret();                                 // Jump to miss handler
}

void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}

void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}

static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ LoadSmiLiteral(r2, Smi::FromInt(static_cast<int>(type)));
    __ push(r2);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> r8.
  __ LoadP(r8, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r8);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ CmpP(r8, Operand(FullCodeGenerator::NO_REGISTERS));
  __ bne(&with_tos_register);
  __ la(sp, MemOperand(sp, 1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ LoadP(r2, MemOperand(sp, 1 * kPointerSize));
  __ CmpP(r8, Operand(FullCodeGenerator::TOS_REG));
  __ bne(&unknown_state);
  __ la(sp, MemOperand(sp, 2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}

void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}

void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

// Clobbers registers {r6, r7, r8, r9}.
void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                             Register function_template_info,
                             Label* receiver_check_failed) {
  Register signature = r6;
  Register map = r7;
  Register constructor = r8;
  Register scratch = r9;

  // If there is no signature, return the holder.
  __ LoadP(signature, FieldMemOperand(function_template_info,
                                      FunctionTemplateInfo::kSignatureOffset));
  Label receiver_check_passed;
  __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
                &receiver_check_passed);

  // Walk the prototype chain.
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, scratch, scratch);
  __ CmpP(scratch, Operand(JS_FUNCTION_TYPE));
  Label next_prototype;
  __ bne(&next_prototype);
  Register type = constructor;
  __ LoadP(type,
           FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(type,
           FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ CmpP(signature, type);
  __ beq(&receiver_check_passed);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE);
  __ bne(&next_prototype);

  // Otherwise load the parent function template and iterate.
  __ LoadP(type,
           FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ b(&function_template_loop);

  // Load the next prototype.
  __ bind(&next_prototype);
  __ LoadlW(scratch, FieldMemOperand(map, Map::kBitField3Offset));
  __ DecodeField<Map::HasHiddenPrototype>(scratch);
  __ beq(receiver_check_failed);
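  // Only hidden prototypes are walked transparently: a zero field above
  // means the chain ends here, so the receiver is incompatible.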

  __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ b(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}

void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : number of arguments excluding receiver
  //  -- r3 : callee
  //  -- lr : return address
  //  -- sp[0] : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc] : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ LoadP(r5, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2));
  __ LoadP(r4, MemOperand(sp, r1));
  CompatibleReceiverCheck(masm, r4, r5, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ LoadP(r6, FieldMemOperand(r5, FunctionTemplateInfo::kCallCodeOffset));
  __ LoadP(r6, FieldMemOperand(r6, CallHandlerInfo::kFastHandlerOffset));
  __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver).
  __ AddP(r1, r1, Operand(kPointerSize));
  __ AddP(sp, sp, r1);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ LoadP(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r2);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CmpSmiLiteral(r2, Smi::FromInt(0), r0);
  __ bne(&skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ LoadP(r3, FieldMemOperand(r2, Code::kDeoptimizationDataOffset));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ LoadP(
      r3, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(
                                  DeoptimizationInputData::kOsrPcOffsetIndex)));
  __ SmiUntag(r3);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ AddP(r2, r3);
  __ AddP(r0, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ LoadRR(r14, r0);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}

// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- lr : return address
  //  -- sp[0] : receiver
  // -----------------------------------

  // 1. Pop receiver into r2 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(r2);
    __ JumpIfSmi(r2, &receiver_not_date);
    __ CompareObjectType(r2, r3, r4, JS_DATE_TYPE);
    __ bne(&receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    __ LoadP(r2, FieldMemOperand(r2, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      Label stamp_mismatch;
      __ mov(r3, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
      __ LoadP(r3, MemOperand(r3));
      __ LoadP(ip, FieldMemOperand(r2, JSDate::kCacheStampOffset));
      __ CmpP(r3, ip);
      __ bne(&stamp_mismatch);
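      // The cache stamp matches, so the cached field value is still valid
      // and can be returned directly.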
      __ LoadP(r2, FieldMemOperand(
                       r2, JSDate::kValueOffset + field_index * kPointerSize));
      __ Ret();
      __ bind(&stamp_mismatch);
    }
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, r3);
    __ LoadSmiLiteral(r3, Smi::FromInt(field_index));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  __ TailCallRuntime(Runtime::kThrowNotDateError);
}

// static
void Builtins::Generate_FunctionHasInstance(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : argc
  //  -- sp[0] : first argument (left-hand side)
  //  -- sp[4] : receiver (right-hand side)
  // -----------------------------------

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ LoadP(InstanceOfDescriptor::LeftRegister(),
             MemOperand(fp, 2 * kPointerSize));  // Load left-hand side.
    __ LoadP(InstanceOfDescriptor::RightRegister(),
             MemOperand(fp, 3 * kPointerSize));  // Load right-hand side.
    InstanceOfStub stub(masm->isolate(), true);
    __ CallStub(&stub);
  }

  // Pop the argument and the receiver.
  __ Ret(2);
}

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into r3, argArray into r2 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label skip;
    Register arg_size = r4;
    Register new_sp = r5;
    Register scratch = r6;
    __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
    __ AddP(new_sp, sp, arg_size);
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ LoadRR(scratch, r2);
    __ LoadP(r3, MemOperand(new_sp, 0));  // receiver
    __ CmpP(arg_size, Operand(kPointerSize));
    __ blt(&skip);
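    // Plain loads do not change the condition code, so the beq below still
    // tests the CmpP above (i.e. exactly one argument was passed).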
1505 __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize)); // thisArg
1506 __ beq(&skip);
1507 __ LoadP(r2, MemOperand(new_sp, 2 * -kPointerSize)); // argArray
1508 __ bind(&skip);
1509 __ LoadRR(sp, new_sp);
1510 __ StoreP(scratch, MemOperand(sp, 0));
1511 }
1512
1513 // ----------- S t a t e -------------
1514 // -- r2 : argArray
1515 // -- r3 : receiver
1516 // -- sp[0] : thisArg
1517 // -----------------------------------
1518
1519 // 2. Make sure the receiver is actually callable.
1520 Label receiver_not_callable;
1521 __ JumpIfSmi(r3, &receiver_not_callable);
1522 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
1523 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
1524 __ TestBit(r6, Map::kIsCallable);
1525 __ beq(&receiver_not_callable);
1526
1527 // 3. Tail call with no arguments if argArray is null or undefined.
1528 Label no_arguments;
1529 __ JumpIfRoot(r2, Heap::kNullValueRootIndex, &no_arguments);
1530 __ JumpIfRoot(r2, Heap::kUndefinedValueRootIndex, &no_arguments);
1531
1532 // 4a. Apply the receiver to the given argArray (passing undefined for
1533 // new.target).
1534 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
1535 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1536
1537 // 4b. The argArray is either null or undefined, so we tail call without any
1538 // arguments to the receiver.
1539 __ bind(&no_arguments);
1540 {
1541 __ LoadImmP(r2, Operand::Zero());
1542 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1543 }
1544
1545 // 4c. The receiver is not callable, throw an appropriate TypeError.
1546 __ bind(&receiver_not_callable);
1547 {
1548 __ StoreP(r3, MemOperand(sp, 0));
1549 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1550 }
1551}
1552
1553// static
1554void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r2: actual number of arguments
  {
    Label done;
    __ CmpP(r2, Operand::Zero());
    __ bne(&done, Label::kNear);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ AddP(r2, Operand(1));
    __ bind(&done);
  }

  // r2: actual number of arguments
  // 2. Get the callable to call (passed as receiver) from the stack.
  __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
  __ LoadP(r3, MemOperand(sp, r4));

  // 3. Shift arguments and return address one slot down on the stack
  // (overwriting the original receiver). Adjust argument count to make
  // the original first argument the new receiver.
  // r2: actual number of arguments
  // r3: callable
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ AddP(r4, sp, r4);

    __ bind(&loop);
    __ LoadP(ip, MemOperand(r4, -kPointerSize));
    __ StoreP(ip, MemOperand(r4));
    __ SubP(r4, Operand(kPointerSize));
    __ CmpP(r4, sp);
    __ bne(&loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ SubP(r2, Operand(1));
    __ pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

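  // This builtin implements "Reflect.apply(target, thisArgument,
  // argumentsList)" (ES6 section 26.1.1); unlike Function.prototype.apply,
  // the target is an explicit argument rather than the receiver.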
  // 1. Load target into r3 (if present), argumentsList into r2 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label skip;
    Register arg_size = r4;
    Register new_sp = r5;
    Register scratch = r6;
    __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
    __ AddP(new_sp, sp, arg_size);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ LoadRR(scratch, r3);
    __ LoadRR(r2, r3);
    __ CmpP(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ beq(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize));  // thisArgument
    __ CmpP(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r2, MemOperand(new_sp, 3 * -kPointerSize));  // argumentsList
    __ bind(&skip);
    __ LoadRR(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  //  -- r2    : argumentsList
  //  -- r3    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(r3, &target_not_callable);
  __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
  __ TestBit(r6, Map::kIsCallable);
  __ beq(&target_not_callable);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ StoreP(r3, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

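  // This builtin implements "Reflect.construct(target, argumentsList[,
  // newTarget])" (ES6 section 26.1.2); when newTarget is absent it defaults
  // to target, which the register setup below mirrors.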
  // 1. Load target into r3 (if present), argumentsList into r2 (if present),
  // new.target into r5 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push undefined
  // as the receiver instead (Reflect.construct takes no thisArgument).
  {
    Label skip;
    Register arg_size = r4;
    Register new_sp = r6;
    __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
    __ AddP(new_sp, sp, arg_size);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ LoadRR(r2, r3);
    __ LoadRR(r5, r3);
    __ StoreP(r3, MemOperand(new_sp, 0));  // receiver (undefined)
    __ CmpP(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ LoadRR(r5, r3);  // new.target defaults to target
    __ beq(&skip);
    __ LoadP(r2, MemOperand(new_sp, 2 * -kPointerSize));  // argumentsList
    __ CmpP(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r5, MemOperand(new_sp, 3 * -kPointerSize));  // new.target
    __ bind(&skip);
    __ LoadRR(sp, new_sp);
  }

  // ----------- S t a t e -------------
  //  -- r2    : argumentsList
  //  -- r3    : target
  //  -- r5    : new.target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(r3, &target_not_constructor);
  __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
  __ TestBit(r6, Map::kIsConstructor);
  __ beq(&target_not_constructor);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(r5, &new_target_not_constructor);
  __ LoadP(r6, FieldMemOperand(r5, HeapObject::kMapOffset));
  __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
  __ TestBit(r6, Map::kIsConstructor);
  __ beq(&new_target_not_constructor);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ StoreP(r3, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ StoreP(r5, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- r2 : actual number of arguments
  //  -- r3 : function (passed through to callee)
  //  -- r4 : expected number of arguments
  //  -- r5 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
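  // As a worked example (assuming a 64-bit target, i.e. kPointerSize == 8):
  // with 1024 bytes of headroom left (r7 = sp - limit = 1024) and 200
  // expected arguments (r0 = 200 * 8 = 1600), r7 <= r0 holds and we branch
  // to stack_overflow.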
  __ LoadRoot(r7, Heap::kRealStackLimitRootIndex);
  // Make r7 the space we have left. The stack might already be overflowed
  // here which will cause r7 to become negative.
  __ SubP(r7, sp, r7);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftP(r0, r4, Operand(kPointerSizeLog2));
  __ CmpP(r7, r0);
  __ ble(stack_overflow);  // Signed comparison.
}

static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r2);
  __ LoadSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  // Stack updated as such:
  //   old SP --->
  //                R14 Return Addr
  //                Old FP      <--- New FP
  //                Argument Adaptor SMI
  //                Function
  //                ArgC as SMI <--- New SP
  __ lay(sp, MemOperand(sp, -5 * kPointerSize));

  // Cleanse the top nibble of 31-bit pointers.
  __ CleanseP(r14);
  __ StoreP(r14, MemOperand(sp, 4 * kPointerSize));
  __ StoreP(fp, MemOperand(sp, 3 * kPointerSize));
  __ StoreP(r6, MemOperand(sp, 2 * kPointerSize));
  __ StoreP(r3, MemOperand(sp, 1 * kPointerSize));
  __ StoreP(r2, MemOperand(sp, 0 * kPointerSize));
  __ la(fp, MemOperand(sp, StandardFrameConstants::kFixedFrameSizeFromFp +
                               kPointerSize));
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ LoadP(r3, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                                kPointerSize)));
  int stack_adjustment = kPointerSize;  // adjust for receiver
  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
  __ SmiToPtrArrayOffset(r3, r3);
  __ lay(sp, MemOperand(sp, r3));
}

// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : argumentsList
  //  -- r3    : target
  //  -- r5    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

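  // This is the common tail of Function.prototype.apply, Reflect.apply and
  // Reflect.construct above: flatten argumentsList into stack arguments,
  // then dispatch to Call (new.target is undefined) or Construct.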
  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(r2, &create_runtime);

    // Load the map of argumentsList into r4.
    __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset));

    // Load native context into r6.
    __ LoadP(r6, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ LoadP(ip, ContextMemOperand(r6, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ CmpP(ip, r4);
    __ beq(&create_arguments);
    __ LoadP(ip, ContextMemOperand(r6, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ CmpP(ip, r4);
    __ beq(&create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(r4, ip, JS_ARRAY_TYPE);
    __ beq(&create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r3, r5, r2);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(r3, r5);
      __ LoadP(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
      __ SmiUntag(r4);
    }
    __ b(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ LoadP(r4, FieldMemOperand(r2, JSArgumentsObject::kLengthOffset));
    __ LoadP(r6, FieldMemOperand(r2, JSObject::kElementsOffset));
    __ LoadP(ip, FieldMemOperand(r6, FixedArray::kLengthOffset));
    __ CmpP(r4, ip);
    __ bne(&create_runtime);
    __ SmiUntag(r4);
    __ LoadRR(r2, r6);
    __ b(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ LoadlB(r4, FieldMemOperand(r4, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(r4);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ CmpP(r4, Operand(FAST_ELEMENTS));
    __ bgt(&create_runtime);
    __ CmpP(r4, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ beq(&create_runtime);
    __ LoadP(r4, FieldMemOperand(r2, JSArray::kLengthOffset));
    __ LoadP(r2, FieldMemOperand(r2, JSArray::kElementsOffset));
    __ SmiUntag(r4);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
    // Make ip the space we have left. The stack might already be overflowed
    // here which will cause ip to become negative.
    __ SubP(ip, sp, ip);
    // Check if the arguments will overflow the stack.
    __ ShiftLeftP(r0, r4, Operand(kPointerSizeLog2));
    __ CmpP(ip, r0);  // Signed comparison.
    __ bgt(&done);
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- r3    : target
  //  -- r2    : args (a FixedArray built from argumentsList)
  //  -- r4    : len (number of elements to push from args)
  //  -- r5    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label loop, no_args;
    __ CmpP(r4, Operand::Zero());
    __ beq(&no_args);
    __ AddP(r2, r2,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    __ LoadRR(r1, r4);
    __ bind(&loop);
    __ LoadP(r0, MemOperand(r2, kPointerSize));
    __ la(r2, MemOperand(r2, kPointerSize));
    __ push(r0);
    __ BranchOnCount(r1, &loop);
    __ bind(&no_args);
    __ LoadRR(r2, r4);
  }

  // Dispatch to Call or Construct depending on whether new.target is
  // undefined.
  {
    __ CompareRoot(r5, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}

namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp     <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg  <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
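// For example, in strict mode code such as
//   function g() { "use strict"; return f(1, 2); }
// the call to f() is in tail position; when tail call elimination is
// enabled (checked below), f() reuses g()'s frame instead of growing the
// stack.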
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is active.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
  __ LoadlB(scratch1, MemOperand(scratch1));
  __ CmpP(scratch1, Operand::Zero());
  __ beq(&done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ LoadP(scratch3,
             MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::STUB), r0);
    __ bne(&no_interpreter_frame);
    __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(
      scratch3,
      MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
  __ bne(&no_arguments_adaptor);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ LoadRR(fp, scratch2);
  __ LoadP(caller_args_count_reg,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ b(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count.
  __ LoadP(scratch1,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  __ LoadP(scratch1,
           FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ LoadW(caller_args_count_reg,
           FieldMemOperand(scratch1,
                           SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_S390X
  __ SmiUntag(caller_args_count_reg);
#endif

  __ bind(&formal_parameter_count_loaded);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(r3);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadlW(r5, FieldMemOperand(r4, SharedFunctionInfo::kCompilerHintsOffset));
  __ TestBitMask(r5, SharedFunctionInfo::kClassConstructorBits, r0);
  __ bne(&class_constructor);
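  // (Class constructors are only valid [[Construct]] targets; e.g.
  // "class C {}; C()" must throw a TypeError rather than invoke the body.)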

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
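  // (In sloppy mode, e.g. "f.call(null)" sees the global proxy as its
  // receiver and "f.call(42)" sees a wrapper Number object; strict mode and
  // native functions take the receiver as-is.)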
  Label done_convert;
  __ AndP(r0, r5, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
                          (1 << SharedFunctionInfo::kNativeBit)));
  __ bne(&done_convert);
  {
    // ----------- S t a t e -------------
    //  -- r2 : the number of arguments (not including the receiver)
    //  -- r3 : the function to call (checked to be a JSFunction)
    //  -- r4 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r5);
    } else {
      Label convert_to_object, convert_receiver;
      __ ShiftLeftP(r5, r2, Operand(kPointerSizeLog2));
      __ LoadP(r5, MemOperand(sp, r5));
      __ JumpIfSmi(r5, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(r5, r6, r6, FIRST_JS_RECEIVER_TYPE);
      __ bge(&done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(r5, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(r5, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(r5);
        }
        __ b(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the
        // frame in the fast case? (fall back to AllocateInNewSpace?)
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(r2);
        __ Push(r2, r3);
        __ LoadRR(r2, r5);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ LoadRR(r5, r2);
        __ Pop(r2, r3);
        __ SmiUntag(r2);
      }
      __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ ShiftLeftP(r6, r2, Operand(kPointerSizeLog2));
    __ StoreP(r5, MemOperand(sp, r6));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the function to call (checked to be a JSFunction)
  //  -- r4 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r2, r5, r6, r7);
  }

  __ LoadW(
      r4, FieldMemOperand(r4, SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_S390X
  __ SmiUntag(r4);
#endif
  ParameterCount actual(r2);
  ParameterCount expected(r4);
  __ InvokeFunctionCode(r3, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
    __ push(r3);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}

namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : target (checked to be a JSBoundFunction)
  //  -- r5 : new.target (only in case of [[Construct]])
  // -----------------------------------

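  // For a bound function created as "f.bind(thisArg, 1, 2)", the
  // [[BoundArguments]] list is [1, 2]; those values are inserted between
  // the receiver and the caller-supplied arguments below.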
  // Load [[BoundArguments]] into r4 and length of that into r6.
  Label no_bound_arguments;
  __ LoadP(r4, FieldMemOperand(r3, JSBoundFunction::kBoundArgumentsOffset));
  __ LoadP(r6, FieldMemOperand(r4, FixedArray::kLengthOffset));
  __ SmiUntag(r6);
  __ LoadAndTestP(r6, r6);
  __ beq(&no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- r2 : the number of arguments (not including the receiver)
    //  -- r3 : target (checked to be a JSBoundFunction)
    //  -- r4 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- r5 : new.target (only in case of [[Construct]])
    //  -- r6 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ LoadRR(r8, sp);  // preserve previous stack pointer
      __ ShiftLeftP(r9, r6, Operand(kPointerSizeLog2));
      __ SubP(sp, sp, r9);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
      __ bgt(&done);  // Signed comparison.
      // Restore the stack pointer.
      __ LoadRR(sp, r8);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Relocate arguments down the stack.
    //  -- r2 : the number of arguments (not including the receiver)
    //  -- r8 : the previous stack pointer
    //  -- r9 : the size of the [[BoundArguments]]
    {
      Label skip, loop;
      __ LoadImmP(r7, Operand::Zero());
      __ CmpP(r2, Operand::Zero());
      __ beq(&skip);
      __ LoadRR(r1, r2);
      __ bind(&loop);
      __ LoadP(r0, MemOperand(r8, r7));
      __ StoreP(r0, MemOperand(sp, r7));
      __ AddP(r7, r7, Operand(kPointerSize));
      __ BranchOnCount(r1, &loop);
      __ bind(&skip);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ AddP(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ AddP(r4, r4, r9);
      __ LoadRR(r1, r6);
      __ bind(&loop);
      __ LoadP(r0, MemOperand(r4, -kPointerSize));
      __ lay(r4, MemOperand(r4, -kPointerSize));
      __ StoreP(r0, MemOperand(sp, r7));
      __ AddP(r7, r7, Operand(kPointerSize));
      __ BranchOnCount(r1, &loop);
      __ AddP(r2, r2, r6);
    }
  }
  __ bind(&no_bound_arguments);
}

}  // namespace

// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(r3);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r2, r5, r6, r7);
  }

  // Patch the receiver to [[BoundThis]].
  __ LoadP(ip, FieldMemOperand(r3, JSBoundFunction::kBoundThisOffset));
  __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2));
  __ StoreP(ip, MemOperand(sp, r1));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ LoadP(r3,
           FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                       masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the target to call (can be any Object).
  // -----------------------------------

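  // Dispatch on the target's type: JSFunction and JSBoundFunction take fast
  // paths, a callable JSProxy is handled by the runtime, and any other
  // callable object is routed through the CALL_AS_FUNCTION delegate.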
  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(r3, &non_callable);
  __ bind(&non_smi);
  __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
  __ TestBit(r6, Map::kIsCallable);
  __ beq(&non_callable);

  __ CmpP(r7, Operand(JS_PROXY_TYPE));
  __ bne(&non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r2, r5, r6, r7);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(r3);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ AddP(r2, r2, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2));
  __ StoreP(r3, MemOperand(sp, r7));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r3);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r3);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the constructor to call (checked to be a JSFunction)
  //  -- r5 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(r3);

  // Calling convention for function specific ConstructStubs requires
  // r4 to contain either an AllocationSite or undefined.
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kConstructStubOffset));
  __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the function to call (checked to be a JSBoundFunction)
  //  -- r5 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(r3);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  Label skip;
  __ CmpP(r3, r5);
  __ bne(&skip);
  __ LoadP(r5,
           FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
  __ bind(&skip);

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ LoadP(r3,
           FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the constructor to call (checked to be a JSProxy)
  //  -- r5 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(r3, r5);
  // Include the pushed new_target, constructor and the receiver.
  __ AddP(r2, r2, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the constructor to call (can be any Object)
  //  -- r5 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

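  // Dispatch mirrors Generate_Call above: JSFunction and JSBoundFunction
  // take fast paths, JSProxy is handled by the runtime, and any other
  // constructor is routed through the CALL_AS_CONSTRUCTOR delegate.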
  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(r3, &non_constructor);

  // Dispatch based on instance type.
  __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  __ LoadlB(r4, FieldMemOperand(r6, Map::kBitFieldOffset));
  __ TestBit(r4, Map::kIsConstructor);
  __ beq(&non_constructor);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ CmpP(r7, Operand(JS_PROXY_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2));
    __ StoreP(r3, MemOperand(sp, r7));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r3);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : actual number of arguments
  //  -- r3 : function (passed through to callee)
  //  -- r4 : expected number of arguments
  //  -- r5 : new target (passed through to callee)
  // -----------------------------------

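  // For example, calling "function f(a, b, c) {}" as "f(1)" enters this
  // trampoline with an actual count of 1 and an expected count of 3; the
  // two missing parameters are filled with undefined on the "too few" path
  // below, while "f(1, 2, 3, 4)" takes the "enough" path.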
  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeEntryOffset));
  __ CmpP(r2, r4);
  __ blt(&too_few);
  __ CmpP(r4, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ beq(&dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r2 and copy end address into r6.
    // r2: actual number of arguments as a smi
    // r3: function
    // r4: expected number of arguments
    // r5: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r2, r2);
    __ AddP(r2, fp);
    // Adjust for return address and receiver.
    __ AddP(r2, r2, Operand(2 * kPointerSize));
    __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
    __ SubP(r6, r2, r6);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r2: copy start address
    // r3: function
    // r4: expected number of arguments
    // r5: new target (passed through to callee)
    // r6: copy end address
    // ip: code entry to call

    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r2, 0));
    __ push(r0);
    __ CmpP(r2, r6);  // Compare before moving to next argument.
    __ lay(r2, MemOperand(r2, -kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }

  {  // Too few parameters: actual < expected
    __ bind(&too_few);

    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r2; the copy end address is fp.
    // r2: actual number of arguments as a smi
    // r3: function
    // r4: expected number of arguments
    // r5: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r2, r2);
    __ lay(r2, MemOperand(r2, fp));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r2: copy start address
    // r3: function
    // r4: expected number of arguments
    // r5: new target (passed through to callee)
    // ip: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r2, 2 * kPointerSize));
    __ push(r0);
    __ CmpP(r2, fp);  // Compare before moving to next argument.
    __ lay(r2, MemOperand(r2, -kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    // r3: function
    // r4: expected number of arguments
    // ip: code entry to call
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
    __ SubP(r6, fp, r6);
    // Adjust for frame.
    __ SubP(r6, r6, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ CmpP(sp, r6);
    __ bne(&fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ LoadRR(r2, r4);
  // r2 : expected number of arguments
  // r3 : function (passed through to callee)
  // r5 : new target (passed through to callee)
  __ CallJSEntry(ip);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ JumpToJSEntry(ip);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_S390