// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_S390

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
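// The `__` shorthand defined above expands to ACCESS_MASM(masm), so each
// `__ Foo(...)` statement below emits the corresponding s390 instruction(s)
// into the MacroAssembler buffer of the builtin being generated.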

void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r2                 : number of arguments excluding receiver
  //  -- r3                 : target
  //  -- r5                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(r3);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));

  // Insert extra arguments.
  int num_extra_args = 0;
  switch (extra_args) {
    case BuiltinExtraArguments::kTarget:
      __ Push(r3);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kNewTarget:
      __ Push(r5);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kTargetAndNewTarget:
      __ Push(r3, r5);
      num_extra_args += 2;
      break;
    case BuiltinExtraArguments::kNone:
      break;
  }

  // JumpToExternalReference expects r2 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ AddP(r2, r2, Operand(num_extra_args + 1));
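  // For example, with kTargetAndNewTarget and two user arguments, r2 ends up
  // as 2 (arguments) + 2 (extra arguments) + 1 (receiver) = 5.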

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r3);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r4);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
    __ CompareObjectType(r4, r5, r6, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function by tail calling its constructor stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r3);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r4);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r4, r5, r6, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ LoadRR(r5, r3);
  // Run the native code for the Array function called as a normal function
  // by tail calling its constructor stub.
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r2                 : number of arguments
  //  -- lr                 : return address
  //  -- sp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- sp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Condition const cond_done = (kind == MathMaxMinKind::kMin) ? lt : gt;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r3 and the double value in d1.
  __ LoadRoot(r3, root_index);
  __ LoadDouble(d1, FieldMemOperand(r3, HeapNumber::kValueOffset));

  // Set up state for the loop:
  // r4: address of arg[0] + kPointerSize
  // r5: number of slots to drop at exit (arguments + receiver)
  __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
  __ AddP(r4, sp, r4);
  __ AddP(r5, r2, Operand(1));
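  // The loop below pre-decrements r4 by one pointer slot each iteration and
  // stops once r4 reaches sp, so every stack slot between arg[0] and sp is
  // visited exactly once, while r5 keeps the total slot count for the final
  // Drop.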

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ CmpLogicalP(r4, sp);
    __ ble(&done_loop);

    // Load the next parameter tagged value into r2.
    __ lay(r4, MemOperand(r4, -kPointerSize));
    __ LoadP(r2, MemOperand(r4));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumberStub if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r2, &convert_smi);
    __ LoadP(r6, FieldMemOperand(r2, HeapObject::kMapOffset));
    __ JumpIfRoot(r6, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumberStub to convert it.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r5);
      __ Push(r3, r4, r5);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Pop(r3, r4, r5);
      __ SmiUntag(r5);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
        __ SmiToDouble(d1, r3);
        __ JumpIfSmi(r3, &done_restore);
        __ LoadDouble(d1, FieldMemOperand(r3, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ LoadDouble(d2, FieldMemOperand(r2, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r2);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left
    // hand side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ cdbr(d1, d2);
    __ bunordered(&compare_nan);
    __ b(cond_done, &loop);
    __ b(CommuteCondition(cond_done), &compare_swap);

    // Left and right hand side are equal, check for -0 vs. +0.
    __ TestDoubleIsMinusZero(reg, r6, r7);
    __ bne(&loop);

    // Update accumulator. Result is on the right hand side.
    __ bind(&compare_swap);
    __ ldr(d1, d2);
    __ LoadRR(r3, r2);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    // We still need to visit the rest of the arguments.
    __ bind(&compare_nan);
    __ LoadRoot(r3, Heap::kNanValueRootIndex);
    __ LoadDouble(d1, FieldMemOperand(r3, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  __ LoadRR(r2, r3);
  __ Drop(r5);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2                     : number of arguments
  //  -- r3                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r2 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r2, r2, Operand(kPointerSizeLog2));
    __ la(sp, MemOperand(sp, r2));
    __ LoadP(r2, MemOperand(sp));
    __ Drop(2);
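    // sp was first advanced past the (argc - 1) slots above the first
    // argument, so the LoadP above read arg[0]; the Drop then popped that
    // slot together with the receiver.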
  }

  // 2a. Convert the first argument to a number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ LoadSmiLiteral(r2, Smi::FromInt(0));
  __ Ret(1);
}

// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2                     : number of arguments
  //  -- r3                     : constructor function
  //  -- r5                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));

  // 2. Load the first argument into r4 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
    __ la(sp, MemOperand(sp, r4));
    __ LoadP(r4, MemOperand(sp));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadSmiLiteral(r4, Smi::FromInt(0));
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r4 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r4, &done_convert);
    __ CompareObjectType(r4, r6, r6, HEAP_NUMBER_TYPE);
    __ beq(&done_convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r3, r5);
      __ LoadRR(r2, r4);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ LoadRR(r4, r2);
      __ Pop(r3, r5);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ CmpP(r3, r5);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r2, r3, r4, r6, r7, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r4);
  }
  __ StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset), r0);
  __ Ret();
}

// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2                     : number of arguments
  //  -- r3                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  // 1. Load the first argument into r2 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r2, r2, Operand(kPointerSizeLog2));
    __ lay(sp, MemOperand(sp, r2));
    __ LoadP(r2, MemOperand(sp));
    __ Drop(2);
  }

  // 2a. At least one argument, return r2 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r2, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(r2, r3, r3, FIRST_NONSTRING_TYPE);
    __ bgt(&to_string);
    __ beq(&symbol_descriptive_string);
    __ Ret();
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r2, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r2 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }
  // 3b. Convert symbol in r2 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(r2);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}

// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2                     : number of arguments
  //  -- r3                     : constructor function
  //  -- r5                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));

  // 2. Load the first argument into r4 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
    __ lay(sp, MemOperand(sp, r4));
    __ LoadP(r4, MemOperand(sp));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r4, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r4 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r4, &convert);
    __ CompareObjectType(r4, r6, r6, FIRST_NONSTRING_TYPE);
    __ blt(&done_convert);
    __ bind(&convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(r3, r5);
      __ LoadRR(r2, r4);
      __ CallStub(&stub);
      __ LoadRR(r4, r2);
      __ Pop(r3, r5);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ CmpP(r3, r5);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r2, r3, r4, r6, r7, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r4);
  }
  __ StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset), r0);
  __ Ret();
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
  __ AddP(ip, Operand(Code::kHeaderSize - kHeapObjectTag));
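  // ip held a tagged Code object pointer; adding Code::kHeaderSize and
  // subtracting the heap-object tag turns it into the raw address of the
  // first instruction, which JumpToJSEntry below branches to.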
  __ JumpToJSEntry(ip);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r2 : argument count (preserved for callee)
  //  -- r3 : target function (preserved for callee)
  //  -- r5 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(r2);
    __ Push(r2, r3, r5, r3);

    __ CallRuntime(function_id, 1);
    __ LoadRR(r4, r2);

    // Restore target function and new target.
    __ Pop(r2, r3, r5);
    __ SmiUntag(r2);
  }
  __ AddP(ip, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CmpLogicalP(sp, RootMemOperand(Heap::kStackLimitRootIndex));
  __ bge(&ok, Label::kNear);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- r2     : number of arguments
  //  -- r3     : constructor function
  //  -- r4     : allocation site or undefined
  //  -- r5     : new target
  //  -- cp     : context
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r4, r6);

    if (!create_implicit_receiver) {
      __ SmiTag(r6, r2);
      __ LoadAndTestP(r6, r6);
      __ Push(cp, r4, r6);
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    } else {
      __ SmiTag(r2);
      __ Push(cp, r4, r2);

      // Allocate the new receiver object.
      __ Push(r3, r5);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ LoadRR(r6, r2);
      __ Pop(r3, r5);

      // ----------- S t a t e -------------
      //  -- r3: constructor function
      //  -- r5: new target
      //  -- r6: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ LoadP(r2, MemOperand(sp));
      __ SmiUntag(r2);
      __ LoadAndTestP(r2, r2);

      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(r6, r6);
    }

    // Set up pointer to last argument.
    __ la(r4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r2: number of arguments
    // r3: constructor function
    // r4: address of last argument (caller sp)
    // r5: new target
    // cr0: condition indicating whether r2 is zero
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ beq(&no_args);
    __ ShiftLeftP(ip, r2, Operand(kPointerSizeLog2));
    __ SubP(sp, sp, ip);
    __ LoadRR(r1, r2);
    __ bind(&loop);
    __ lay(ip, MemOperand(ip, -kPointerSize));
    __ LoadP(r0, MemOperand(ip, r4));
    __ StoreP(r0, MemOperand(ip, sp));
    __ BranchOnCount(r1, &loop);
    __ bind(&no_args);
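    // The copy loop above counts ip down from argc * kPointerSize: each pass
    // moves one caller-frame slot at r4 + ip into the freshly reserved stack
    // space at sp + ip, and BranchOnCount decrements r1 until it hits zero.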

    // Call the function.
    // r2: number of arguments
    // r3: constructor function
    // r5: new target

    ParameterCount actual(r2);
    __ InvokeFunction(r3, r5, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r2: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r2: result
      // sp[0]: receiver
      // sp[1]: new.target
      // sp[2]: number of arguments (smi-tagged)
      __ JumpIfSmi(r2, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r2, r3, r5, FIRST_JS_RECEIVER_TYPE);
      __ bge(&exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ LoadP(r2, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r2: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ LoadP(r3, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r2, &dont_throw);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ SmiToPtrArrayOffset(r3, r3);
  __ AddP(sp, sp, r3);
  __ AddP(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r3, r4);
  }
  __ Ret();
}

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the value to pass to the generator
  //  -- r3 : the JSGeneratorObject to resume
  //  -- r4 : the resume mode (tagged)
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(r3);

  // Store input value into generator object.
  __ StoreP(r2, FieldMemOperand(r3, JSGeneratorObject::kInputOffset), r0);
  __ RecordWriteField(r3, JSGeneratorObject::kInputOffset, r2, r5,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ LoadP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset));
  __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(masm->isolate());
  __ mov(ip, Operand(step_in_enabled));
  __ LoadlB(ip, MemOperand(ip));
  __ CmpP(ip, Operand::Zero());
  __ beq(&skip_flooding);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r3, r4, r6);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(r3, r4);
    __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
  }
  __ bind(&skip_flooding);

  // Push receiver.
  __ LoadP(ip, FieldMemOperand(r3, JSGeneratorObject::kReceiverOffset));
  __ Push(ip);

  // ----------- S t a t e -------------
  //  -- r3    : the JSGeneratorObject to resume
  //  -- r4    : the resume mode (tagged)
  //  -- r6    : generator function
  //  -- cp    : generator context
  //  -- lr    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
  __ LoadW(
      r2, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label loop, done_loop;
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
#if V8_TARGET_ARCH_S390X
    __ CmpP(r2, Operand::Zero());
    __ beq(&done_loop);
#else
    __ SmiUntag(r2);
    __ LoadAndTestP(r2, r2);
    __ beq(&done_loop);
#endif
    __ LoadRR(r1, r2);
    __ bind(&loop);
    __ push(ip);
    __ BranchOnCount(r1, &loop);
    __ bind(&done_loop);
  }
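  // Note the #if above: on 64-bit (V8_TARGET_ARCH_S390X) the formal parameter
  // count is read as a raw int32, while the 31-bit port stores it as a Smi
  // and has to untag it before using it as a loop counter.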

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(r5, r5, r5, BYTECODE_ARRAY_TYPE);
  __ bne(&old_generator, Label::kNear);

  // New-style (ignition/turbofan) generator object
  {
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ LoadRR(r5, r3);
    __ LoadRR(r3, r6);
    __ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeEntryOffset));
    __ JumpToJSEntry(ip);
  }
  // Old-style (full-codegen) generator object
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PushStandardFrame(r6);

    // Restore the operand stack.
    __ LoadP(r2, FieldMemOperand(r3, JSGeneratorObject::kOperandStackOffset));
    __ LoadP(r5, FieldMemOperand(r2, FixedArray::kLengthOffset));
    __ AddP(r2, r2,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    {
      Label loop, done_loop;
      __ SmiUntag(r5);
      __ LoadAndTestP(r5, r5);
      __ beq(&done_loop);
      __ LoadRR(r1, r5);
      __ bind(&loop);
      __ LoadP(ip, MemOperand(r2, kPointerSize));
      __ la(r2, MemOperand(r2, kPointerSize));
      __ Push(ip);
      __ BranchOnCount(r1, &loop);
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(ip, Heap::kEmptyFixedArrayRootIndex);
    __ StoreP(ip, FieldMemOperand(r3, JSGeneratorObject::kOperandStackOffset),
              r0);

    // Resume the generator function at the continuation.
    __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kCodeOffset));
    __ AddP(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
    {
      ConstantPoolUnavailableScope constant_pool_unavailable(masm);
      __ LoadP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset));
      __ SmiUntag(r4);
      __ AddP(r5, r5, r4);
      __ LoadSmiLiteral(r4,
                        Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
      __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
                r0);
      __ LoadRR(r2, r3);  // Continuation expects generator object in r2.
      __ Jump(r5);
    }
  }
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ push(r3);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

// Clobbers r4; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r4, Heap::kRealStackLimitRootIndex);
  // Make r4 the space we have left. The stack might already be overflowed
  // here which will cause r4 to become negative.
  __ SubP(r4, sp, r4);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiToPtrArrayOffset(r0, argc);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ ShiftLeftP(r0, argc, Operand(kPointerSizeLog2));
  }
  __ CmpP(r4, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
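
// As a concrete reading of the check above: with, say, 4 untagged arguments
// on the 64-bit port, r0 = 4 << kPointerSizeLog2 = 32 bytes, and the runtime
// throw is taken unless sp - real_stack_limit is strictly greater than that.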

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r2: new.target
  // r3: function
  // r4: receiver
  // r5: argc
  // r6: argv
  // r0, r7-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    // FrameScope ends up calling MacroAssembler::EnterFrame here
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ LoadP(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r3, r4);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r4.
    Generate_CheckStackOverflow(masm, r5, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop from argv to sp.
    // The arguments are actually placed in reverse order on sp
    // compared to argv (i.e. arg1 is highest memory in sp).
    // r3: function
    // r5: argc
    // r6: argv, i.e. points to first arg
    // r7: scratch reg to hold scaled argc
    // r8: scratch reg to hold arg handle
    // r9: scratch reg to hold index into argv
    Label argLoop, argExit;
    intptr_t zero = 0;
    __ ShiftLeftP(r7, r5, Operand(kPointerSizeLog2));
    __ SubRR(sp, r7);                // Buy the stack frame to fit args
    __ LoadImmP(r9, Operand(zero));  // Initialize argv index
    __ bind(&argLoop);
    __ CmpPH(r7, Operand(zero));
    __ beq(&argExit, Label::kNear);
    __ lay(r7, MemOperand(r7, -kPointerSize));
    __ LoadP(r8, MemOperand(r9, r6));         // read next parameter
    __ la(r9, MemOperand(r9, kPointerSize));  // r9++;
    __ LoadP(r0, MemOperand(r8));             // dereference handle
    __ StoreP(r0, MemOperand(r7, sp));        // push parameter
    __ b(&argLoop);
    __ bind(&argExit);
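    // Each argv slot holds a handle (a pointer into the handle area) rather
    // than the object itself, hence the double load above: first the handle
    // out of argv, then the tagged object the handle points at.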

    // Set up new.target and argc.
    __ LoadRR(r6, r2);
    __ LoadRR(r2, r5);
    __ LoadRR(r5, r6);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ LoadRR(r7, r6);
    __ LoadRR(r8, r6);
    __ LoadRR(r9, r6);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ b(r14);

  // r2: result
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r3: the JS function object being called.
//   o r5: the new target
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(r3);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ LoadP(r2, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  Label array_done;
  Register debug_info = r4;
  DCHECK(!debug_info.is(r2));
  __ LoadP(debug_info,
           FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
  // Load original bytecode array or the debug copy.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r2, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpSmiLiteral(debug_info, DebugInfo::uninitialized(), r0);
  __ beq(&array_done);
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ bind(&array_done);

  // Check function data field is actually a BytecodeArray object.
  Label bytecode_array_not_present;
  __ CompareRoot(kInterpreterBytecodeArrayRegister,
                 Heap::kUndefinedValueRootIndex);
  __ beq(&bytecode_array_not_present);

  if (FLAG_debug_code) {
    __ TestIfSmi(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r2, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Load the initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r4, kInterpreterBytecodeOffsetRegister);
  __ Push(r5, kInterpreterBytecodeArrayRegister, r4);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ LoadlW(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                  BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ SubP(r5, sp, r4);
    __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
    __ CmpLogicalP(r5, r0);
    __ bge(&ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop, no_args;
    __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
    __ ShiftRightP(r4, r4, Operand(kPointerSizeLog2));
    __ LoadAndTestP(r4, r4);
    __ beq(&no_args);
    __ LoadRR(r1, r4);
    __ bind(&loop);
    __ push(r5);
    __ SubP(r1, Operand(1));
    __ bne(&loop);
    __ bind(&no_args);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
                           kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2));
  __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Call(ip);
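  // Dispatch above works by loading the current bytecode as an unsigned byte,
  // scaling it by the pointer size, and indexing the dispatch table with the
  // result; the handler's code entry is then invoked directly.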

  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in r2.

  // Get the arguments + receiver count.
  __ LoadP(r4, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ LoadlW(r4, FieldMemOperand(r4, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  __ lay(sp, MemOperand(sp, r4));
  __ Ret();

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ bind(&bytecode_array_not_present);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kCodeOffset));
  __ AddP(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(r6, FieldMemOperand(r3, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(r3, r6, r7);
  __ JumpToJSEntry(r6);
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
                                         Register count, Register scratch) {
  Label loop;
  __ AddP(index, index, Operand(kPointerSize));  // Bias up for LoadPU
  __ LoadRR(r0, count);
  __ bind(&loop);
  __ LoadP(scratch, MemOperand(index, -kPointerSize));
  __ lay(index, MemOperand(index, -kPointerSize));
  __ push(scratch);
  __ SubP(r0, Operand(1));
  __ bne(&loop);
}
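
// The helper above biases `index` up one slot so the loop can load with a
// -kPointerSize displacement before decrementing, pushing `count` values
// from highest address to lowest; r0 serves as the loop counter.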

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r4 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r3 : the target to call (can be any Object).
  // -----------------------------------

  // Calculate number of arguments (add one for the receiver).
  __ AddP(r5, r2, Operand(1));

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r4, r5, r6);

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : argument count (not including receiver)
  //  -- r5 : new target
  //  -- r3 : constructor to call
  //  -- r4 : address of the first argument
  // -----------------------------------

  // Push a slot for the receiver to be constructed.
  __ LoadImmP(r0, Operand::Zero());
  __ push(r0);

  // Push the arguments (skip if none).
  Label skip;
  __ CmpP(r2, Operand::Zero());
  __ beq(&skip);
  Generate_InterpreterPushArgs(masm, r4, r2, r6);
  __ bind(&skip);

  // Call the constructor with r2, r3, and r5 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ Move(r4, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ AddP(r14, r4, Operand(interpreter_entry_return_pc_offset->value() +
                           Code::kHeaderSize - kHeapObjectTag));

  // Initialize the dispatch table register.
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
                           kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2));
  __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Jump(ip);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : argument count (preserved for callee)
  //  -- r5 : new target (preserved for callee)
  //  -- r3 : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = r3;
  Register map = r8;
  Register index = r4;
  __ LoadP(map,
           FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(map,
           FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ LoadP(index, FieldMemOperand(map, FixedArray::kLengthOffset));
  __ CmpSmiLiteral(index, Smi::FromInt(2), r0);
  __ blt(&gotta_call_runtime);
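  // (The optimized code map is a FixedArray whose per-context entries span
  // SharedFunctionInfo::kEntryLength slots each, as the loop below assumes;
  // a length below two leaves no room for any such entry, so the cache is
  // effectively empty.)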

  // Find literals.
  // r9  : native context
  // r4  : length / index
  // r8  : optimized code map
  // r5  : new target
  // r3  : closure
  Register native_context = r9;
  __ LoadP(native_context, NativeContextMemOperand());

  __ bind(&loop_top);
  Register temp = r1;
  Register array_pointer = r7;

  // Does the native context match?
  __ SmiToPtrArrayOffset(array_pointer, index);
  __ AddP(array_pointer, map, array_pointer);
  __ LoadP(temp, FieldMemOperand(array_pointer,
                                 SharedFunctionInfo::kOffsetToPreviousContext));
  __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ CmpP(temp, native_context);
  __ bne(&loop_bottom, Label::kNear);
  // OSR id set to none?
  __ LoadP(temp,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ CmpSmiLiteral(temp, Smi::FromInt(bailout_id), r0);
  __ bne(&loop_bottom, Label::kNear);
  // Literals available?
  __ LoadP(temp,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousLiterals));
  __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(temp, &gotta_call_runtime);

  // Save the literals in the closure.
  __ StoreP(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset), r0);
  __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r6,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Code available?
  Register entry = r6;
  __ LoadP(entry,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &maybe_call_runtime);

  // Found literals and code. Get them into the closure and return.
  // Store code entry in the closure.
  __ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));

  Label install_optimized_code_and_tailcall;
  __ bind(&install_optimized_code_and_tailcall);
  __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(closure, entry, r7);

  // Link the closure into the optimized function list.
  // r6 : code entry
  // r9 : native context
  // r3 : closure
  __ LoadP(
      r7, ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ StoreP(r7, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset),
            r0);
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r7, temp,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ StoreP(
      closure,
      ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST), r0);
  // Save closure before the write barrier.
  __ LoadRR(r7, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, r7, temp,
                            kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ JumpToJSEntry(entry);

  __ bind(&loop_bottom);
  __ SubSmiLiteral(index, index, Smi::FromInt(SharedFunctionInfo::kEntryLength),
                   r0);
  __ CmpSmiLiteral(index, Smi::FromInt(1), r0);
  __ bgt(&loop_top);

  // We found neither literals nor code.
  __ b(&gotta_call_runtime);

  __ bind(&maybe_call_runtime);

  // Last possibility. Check the context free optimized code map entry.
  __ LoadP(entry,
           FieldMemOperand(map, FixedArray::kHeaderSize +
                                    SharedFunctionInfo::kSharedCodeIndex));
  __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Store code entry in the closure.
  __ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ b(&install_optimized_code_and_tailcall);

  __ bind(&try_shared);
  // Is the full code valid?
  __ LoadP(entry,
           FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ LoadlW(r7, FieldMemOperand(entry, Code::kFlagsOffset));
  __ DecodeField<Code::KindField>(r7);
  __ CmpP(r7, Operand(Code::BUILTIN));
  __ beq(&gotta_call_runtime);
  // Yes, install the full code.
  __ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(closure, entry, r7);
  __ JumpToJSEntry(entry);

  __ bind(&gotta_call_runtime);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}

void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}

static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r2 at the start of the PlatformCodeAge sequence.
  __ CleanseP(r14);
  __ SubP(r14, Operand(kCodeAgingSequenceLength));
  __ LoadRR(r2, r14);

  __ pop(r14);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r2 - contains return address (beginning of patch sequence)
  //   r3 - isolate
  //   r5 - new target
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r4);
  __ mov(r3, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
  __ LoadRR(ip, r2);
  __ Jump(ip);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR

void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r2 at the start of the PlatformCodeAge sequence.
  __ CleanseP(r14);
  __ SubP(r14, Operand(kCodeAgingSequenceLength));
  __ LoadRR(r2, r14);

  __ pop(r14);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r2 - contains return address (beginning of patch sequence)
  //   r3 - isolate
  //   r5 - new target
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r4);
  __ mov(r3, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
  __ LoadRR(ip, r2);

  // Perform prologue operations usually performed by the young code stub.
  __ PushStandardFrame(r3);

  // Jump to point after the code-age stub.
  __ AddP(r2, ip, Operand(kNoCodeAgeSequenceLength));
  __ Jump(r2);
}

void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}

void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}

static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ la(sp, MemOperand(sp, kPointerSize));  // Ignore state
  __ Ret();                                 // Jump to miss handler
}

void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}

void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}

static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ LoadSmiLiteral(r2, Smi::FromInt(static_cast<int>(type)));
    __ push(r2);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> r8.
  __ LoadP(r8, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r8);
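  // r8 now holds a Deoptimizer::BailoutState value: NO_REGISTERS means there
  // is nothing live to restore, while TOS_REGISTER means the slot below the
  // state holds a value that must be reloaded into the accumulator (r2).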
1480 // Switch on the state.
1481 Label with_tos_register, unknown_state;
Ben Murdochc5610432016-08-08 18:44:38 +01001482 __ CmpP(
1483 r8,
1484 Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::NO_REGISTERS)));
Ben Murdochda12d292016-06-02 14:46:10 +01001485 __ bne(&with_tos_register);
1486 __ la(sp, MemOperand(sp, 1 * kPointerSize)); // Remove state.
1487 __ Ret();
1488
1489 __ bind(&with_tos_register);
Ben Murdochc5610432016-08-08 18:44:38 +01001490 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r2.code());
Ben Murdochda12d292016-06-02 14:46:10 +01001491 __ LoadP(r2, MemOperand(sp, 1 * kPointerSize));
Ben Murdochc5610432016-08-08 18:44:38 +01001492 __ CmpP(
1493 r8,
1494 Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::TOS_REGISTER)));
Ben Murdochda12d292016-06-02 14:46:10 +01001495 __ bne(&unknown_state);
1496 __ la(sp, MemOperand(sp, 2 * kPointerSize)); // Remove state.
1497 __ Ret();
1498
1499 __ bind(&unknown_state);
1500 __ stop("no cases left");
1501}
1502
1503void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1504 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1505}
1506
1507void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1508 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1509}
1510
1511void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1512 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1513}
1514
1515// Clobbers registers {r6, r7, r8, r9}.
1516void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
1517 Register function_template_info,
1518 Label* receiver_check_failed) {
1519 Register signature = r6;
1520 Register map = r7;
1521 Register constructor = r8;
1522 Register scratch = r9;
1523
1524 // If there is no signature, return the holder.
1525 __ LoadP(signature, FieldMemOperand(function_template_info,
1526 FunctionTemplateInfo::kSignatureOffset));
1527 Label receiver_check_passed;
1528 __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
1529 &receiver_check_passed);
1530
1531 // Walk the prototype chain.
1532 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1533 Label prototype_loop_start;
1534 __ bind(&prototype_loop_start);
1535
1536 // Get the constructor, if any.
1537 __ GetMapConstructor(constructor, map, scratch, scratch);
1538 __ CmpP(scratch, Operand(JS_FUNCTION_TYPE));
1539 Label next_prototype;
1540 __ bne(&next_prototype);
1541 Register type = constructor;
1542 __ LoadP(type,
1543 FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
1544 __ LoadP(type,
1545 FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
1546
1547 // Loop through the chain of inheriting function templates.
1548 Label function_template_loop;
1549 __ bind(&function_template_loop);
1550
1551 // If the signatures match, we have a compatible receiver.
1552 __ CmpP(signature, type);
1553 __ beq(&receiver_check_passed);
1554
1555 // If the current type is not a FunctionTemplateInfo, load the next prototype
1556 // in the chain.
1557 __ JumpIfSmi(type, &next_prototype);
1558 __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE);
1559 __ bne(&next_prototype);
1560
1561 // Otherwise load the parent function template and iterate.
1562 __ LoadP(type,
1563 FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
1564 __ b(&function_template_loop);
1565
1566 // Load the next prototype.
1567 __ bind(&next_prototype);
1568 __ LoadlW(scratch, FieldMemOperand(map, Map::kBitField3Offset));
1569 __ DecodeField<Map::HasHiddenPrototype>(scratch);
1570 __ beq(receiver_check_failed);
1571
1572 __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
1573 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1574 // Iterate.
1575 __ b(&prototype_loop_start);
1576
1577 __ bind(&receiver_check_passed);
1578}
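
// For intuition only, the check above corresponds roughly to the following
// pseudocode sketch (hypothetical helper names, not actual V8 API): it walks
// the receiver's hidden-prototype chain looking for a constructor whose
// template chain contains the expected signature.
//
//   bool CompatibleReceiver(Object receiver, FunctionTemplateInfo info) {
//     Object signature = info.signature;
//     if (signature == undefined) return true;
//     for (Map map = receiver.map;; map = map.prototype.map) {
//       Object ctor = map.GetConstructor();
//       if (ctor is JSFunction) {
//         for (Object t = ctor.shared.function_data;
//              t is FunctionTemplateInfo; t = t.parent_template) {
//           if (t == signature) return true;
//         }
//       }
//       if (!map.has_hidden_prototype) return false;
//     }
//   }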

void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2                 : number of arguments excluding receiver
  //  -- r3                 : callee
  //  -- lr                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ LoadP(r5, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2));
  __ LoadP(r4, MemOperand(sp, r1));
  CompatibleReceiverCheck(masm, r4, r5, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ LoadP(r6, FieldMemOperand(r5, FunctionTemplateInfo::kCallCodeOffset));
  __ LoadP(r6, FieldMemOperand(r6, CallHandlerInfo::kFastHandlerOffset));
  __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver).
  __ AddP(r1, r1, Operand(kPointerSize));
  __ AddP(sp, sp, r1);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ LoadP(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r2);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CmpSmiLiteral(r2, Smi::FromInt(0), r0);
  __ bne(&skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ LoadP(r3, FieldMemOperand(r2, Code::kDeoptimizationDataOffset));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ LoadP(
      r3, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(
                                  DeoptimizationInputData::kOsrPcOffsetIndex)));
  __ SmiUntag(r3);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ AddP(r2, r3);
  __ AddP(r0, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ LoadRR(r14, r0);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}
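
// Entry-address arithmetic above, spelled out (symbolic; the constants are
// target-dependent):
//
//   entry = code_obj - kHeapObjectTag + Code::kHeaderSize + osr_pc_offset
//
// i.e. the untagged start of the code object's instruction stream plus the
// OSR offset recorded in the deoptimization data; placing it in r14 makes the
// final Ret() "return" into the optimized code.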

// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : receiver
  // -----------------------------------

  // 1. Pop receiver into r2 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(r2);
    __ JumpIfSmi(r2, &receiver_not_date);
    __ CompareObjectType(r2, r3, r4, JS_DATE_TYPE);
    __ bne(&receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as
  //    necessary.
  if (field_index == JSDate::kDateValue) {
    __ LoadP(r2, FieldMemOperand(r2, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      Label stamp_mismatch;
      __ mov(r3, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
      __ LoadP(r3, MemOperand(r3));
      __ LoadP(ip, FieldMemOperand(r2, JSDate::kCacheStampOffset));
      __ CmpP(r3, ip);
      __ bne(&stamp_mismatch);
      __ LoadP(r2, FieldMemOperand(
                       r2, JSDate::kValueOffset + field_index * kPointerSize));
      __ Ret();
      __ bind(&stamp_mismatch);
    }
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, r3);
    __ LoadSmiLiteral(r3, Smi::FromInt(field_index));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  __ TailCallRuntime(Runtime::kThrowNotDateError);
}
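
// The cached-field fast path above is, in pseudocode (for intuition only):
//
//   if (field < kFirstUncachedField &&
//       date.cache_stamp == isolate.date_cache_stamp) {
//     return date.fields[field];       // cache still valid, read directly
//   }
//   return GetDateField(date, field);  // recompute via the C++ runtime
//
// The isolate-wide stamp is bumped whenever the date cache is reset (e.g. on
// a timezone change), which invalidates every JSDate's cached fields at once.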

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into r3, argArray into r2 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label skip;
    Register arg_size = r4;
    Register new_sp = r5;
    Register scratch = r6;
    __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
    __ AddP(new_sp, sp, arg_size);
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ LoadRR(scratch, r2);
    __ LoadP(r3, MemOperand(new_sp, 0));  // receiver
    __ CmpP(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize));  // thisArg
    __ beq(&skip);
    __ LoadP(r2, MemOperand(new_sp, 2 * -kPointerSize));  // argArray
    __ bind(&skip);
    __ LoadRR(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  //  -- r2    : argArray
  //  -- r3    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(r3, &receiver_not_callable);
  __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
  __ TestBit(r6, Map::kIsCallable);
  __ beq(&receiver_not_callable);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(r2, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(r2, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ LoadImmP(r2, Operand::Zero());
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ StoreP(r3, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
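
// In JS terms, the builtin above implements roughly (ES6 19.2.3.1):
//
//   Function.prototype.apply = function (thisArg, argArray) {
//     if (!IsCallable(this)) throw new TypeError(...);
//     if (argArray === null || argArray === undefined) {
//       return this.call(thisArg);                          // step 4b
//     }
//     return Reflect.apply(this, thisArg, argArray);        // step 4a
//   };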

// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r2: actual number of arguments
  {
    Label done;
    __ CmpP(r2, Operand::Zero());
    __ bne(&done, Label::kNear);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ AddP(r2, Operand(1));
    __ bind(&done);
  }

  // r2: actual number of arguments
  // 2. Get the callable to call (passed as receiver) from the stack.
  __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
  __ LoadP(r3, MemOperand(sp, r4));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // r2: actual number of arguments
  // r3: callable
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ AddP(r4, sp, r4);

    __ bind(&loop);
    __ LoadP(ip, MemOperand(r4, -kPointerSize));
    __ StoreP(ip, MemOperand(r4));
    __ SubP(r4, Operand(kPointerSize));
    __ CmpP(r4, sp);
    __ bne(&loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ SubP(r2, Operand(1));
    __ pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
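
// Roughly, in JS terms (ES6 19.2.3.3):
//
//   Function.prototype.call = function (thisArg, ...args) {
//     return Reflect.apply(this, thisArg, args);  // TypeError if not callable
//   };
//
// The shift loop above turns the stack layout
//   [callable, thisArg, arg1, ..., argN]  into  [thisArg, arg1, ..., argN]
// so the generic Call builtin sees thisArg as the receiver.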

void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r3 (if present), argumentsList into r2 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label skip;
    Register arg_size = r4;
    Register new_sp = r5;
    Register scratch = r6;
    __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
    __ AddP(new_sp, sp, arg_size);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ LoadRR(scratch, r3);
    __ LoadRR(r2, r3);
    __ CmpP(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ beq(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize));  // thisArgument
    __ CmpP(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r2, MemOperand(new_sp, 3 * -kPointerSize));  // argumentsList
    __ bind(&skip);
    __ LoadRR(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  //  -- r2    : argumentsList
  //  -- r3    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(r3, &target_not_callable);
  __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
  __ TestBit(r6, Map::kIsCallable);
  __ beq(&target_not_callable);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ StoreP(r3, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
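
// Roughly, in JS terms (ES6 26.1.1):
//
//   Reflect.apply = function (target, thisArgument, argumentsList) {
//     if (!IsCallable(target)) throw new TypeError(...);
//     return target.apply(thisArgument, argumentsList);
//   };
//
// Unlike Function.prototype.apply, a null or undefined argumentsList is not
// special-cased here; it simply fails CreateListFromArrayLike downstream in
// the Apply builtin.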

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r3 (if present), argumentsList into r2 (if present),
  // new.target into r5 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and leave undefined on
  // the stack as the receiver.
  {
    Label skip;
    Register arg_size = r4;
    Register new_sp = r6;
    __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
    __ AddP(new_sp, sp, arg_size);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ LoadRR(r2, r3);
    __ LoadRR(r5, r3);
    __ StoreP(r3, MemOperand(new_sp, 0));  // receiver (undefined)
    __ CmpP(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ LoadRR(r5, r3);  // new.target defaults to target
    __ beq(&skip);
    __ LoadP(r2, MemOperand(new_sp, 2 * -kPointerSize));  // argumentsList
    __ CmpP(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r5, MemOperand(new_sp, 3 * -kPointerSize));  // new.target
    __ bind(&skip);
    __ LoadRR(sp, new_sp);
  }

  // ----------- S t a t e -------------
  //  -- r2    : argumentsList
  //  -- r5    : new.target
  //  -- r3    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(r3, &target_not_constructor);
  __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
  __ TestBit(r6, Map::kIsConstructor);
  __ beq(&target_not_constructor);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(r5, &new_target_not_constructor);
  __ LoadP(r6, FieldMemOperand(r5, HeapObject::kMapOffset));
  __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
  __ TestBit(r6, Map::kIsConstructor);
  __ beq(&new_target_not_constructor);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ StoreP(r3, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ StoreP(r5, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
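
// Roughly, in JS terms (ES6 26.1.2):
//
//   Reflect.construct = function (target, argumentsList, newTarget = target) {
//     if (!IsConstructor(target) || !IsConstructor(newTarget)) {
//       throw new TypeError(...);
//     }
//     return new target(...argumentsList);  // with new.target === newTarget
//   };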

static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- r2 : actual number of arguments
  //  -- r3 : function (passed through to callee)
  //  -- r4 : expected number of arguments
  //  -- r5 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch interruptions
  // (e.g. debug break and preemption) here, so the "real stack limit" is
  // checked.
  __ LoadRoot(r7, Heap::kRealStackLimitRootIndex);
  // Make r7 the space we have left. The stack might already be overflowed
  // here which will cause r7 to become negative.
  __ SubP(r7, sp, r7);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftP(r0, r4, Operand(kPointerSizeLog2));
  __ CmpP(r7, r0);
  __ ble(stack_overflow);  // Signed comparison.
}

static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r2);
  __ LoadSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  // Stack updated as such:
  //   old SP --->
  //              R14 Return Addr
  //              Old FP                 <--- New FP
  //              Argument Adapter SMI
  //              Function
  //              ArgC as SMI            <--- New SP
  __ lay(sp, MemOperand(sp, -5 * kPointerSize));

  // Cleanse the top nibble of 31-bit pointers.
  __ CleanseP(r14);
  __ StoreP(r14, MemOperand(sp, 4 * kPointerSize));
  __ StoreP(fp, MemOperand(sp, 3 * kPointerSize));
  __ StoreP(r6, MemOperand(sp, 2 * kPointerSize));
  __ StoreP(r3, MemOperand(sp, 1 * kPointerSize));
  __ StoreP(r2, MemOperand(sp, 0 * kPointerSize));
  __ la(fp, MemOperand(sp, StandardFrameConstants::kFixedFrameSizeFromFp +
                               kPointerSize));
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ LoadP(r3, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                                kPointerSize)));
  int stack_adjustment = kPointerSize;  // adjust for receiver
  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
  __ SmiToPtrArrayOffset(r3, r3);
  __ lay(sp, MemOperand(sp, r3));
}

// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : argumentsList
  //  -- r3    : target
  //  -- r5    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(r2, &create_runtime);

    // Load the map of argumentsList into r4.
    __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset));

    // Load native context into r6.
    __ LoadP(r6, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ LoadP(ip, ContextMemOperand(r6, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ CmpP(ip, r4);
    __ beq(&create_arguments);
    __ LoadP(ip, ContextMemOperand(r6, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ CmpP(ip, r4);
    __ beq(&create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(r4, ip, JS_ARRAY_TYPE);
    __ beq(&create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r3, r5, r2);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(r3, r5);
      __ LoadP(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
      __ SmiUntag(r4);
    }
    __ b(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ LoadP(r4, FieldMemOperand(r2, JSArgumentsObject::kLengthOffset));
    __ LoadP(r6, FieldMemOperand(r2, JSObject::kElementsOffset));
    __ LoadP(ip, FieldMemOperand(r6, FixedArray::kLengthOffset));
    __ CmpP(r4, ip);
    __ bne(&create_runtime);
    __ SmiUntag(r4);
    __ LoadRR(r2, r6);
    __ b(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ LoadlB(r4, FieldMemOperand(r4, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(r4);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ CmpP(r4, Operand(FAST_ELEMENTS));
    __ bgt(&create_runtime);
    __ CmpP(r4, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ beq(&create_runtime);
    __ LoadP(r4, FieldMemOperand(r2, JSArray::kLengthOffset));
    __ LoadP(r2, FieldMemOperand(r2, JSArray::kElementsOffset));
    __ SmiUntag(r4);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
    // Make ip the space we have left. The stack might already be overflowed
    // here which will cause ip to become negative.
    __ SubP(ip, sp, ip);
    // Check if the arguments will overflow the stack.
    __ ShiftLeftP(r0, r4, Operand(kPointerSizeLog2));
    __ CmpP(ip, r0);  // Signed comparison.
    __ bgt(&done);
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- r3    : target
  //  -- r2    : args (a FixedArray built from argumentsList)
  //  -- r4    : len (number of elements to push from args)
  //  -- r5    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label loop, no_args;
    __ CmpP(r4, Operand::Zero());
    __ beq(&no_args);
    __ AddP(r2, r2,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    __ LoadRR(r1, r4);
    __ bind(&loop);
    __ LoadP(r0, MemOperand(r2, kPointerSize));
    __ la(r2, MemOperand(r2, kPointerSize));
    __ push(r0);
    __ BranchOnCount(r1, &loop);
    __ bind(&no_args);
    __ LoadRR(r2, r4);
  }

  // Dispatch to Call or Construct depending on whether new.target is
  // undefined.
  {
    __ CompareRoot(r5, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
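
// Stack-overflow check arithmetic above, for reference: with
//
//   space_left = sp - real_stack_limit   (negative if already overflowed)
//   needed     = len * kPointerSize
//
// execution falls through to Runtime::kThrowStackOverflow unless
// space_left > needed; the signed comparison also catches the
// already-overflowed case where space_left is negative.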

namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is active.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
  __ LoadlB(scratch1, MemOperand(scratch1));
  __ CmpP(scratch1, Operand::Zero());
  __ beq(&done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ LoadP(scratch3,
             MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::STUB), r0);
    __ bne(&no_interpreter_frame);
    __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(
      scratch3,
      MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
  __ bne(&no_arguments_adaptor);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ LoadRR(fp, scratch2);
  __ LoadP(caller_args_count_reg,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ b(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count.
  __ LoadP(scratch1,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  __ LoadP(scratch1,
           FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ LoadW(caller_args_count_reg,
           FieldMemOperand(scratch1,
                           SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_S390X
  __ SmiUntag(caller_args_count_reg);
#endif

  __ bind(&formal_parameter_count_loaded);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(r3);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadlW(r5, FieldMemOperand(r4, SharedFunctionInfo::kCompilerHintsOffset));
  __ TestBitMask(r5, SharedFunctionInfo::kClassConstructorBits, r0);
  __ bne(&class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ AndP(r0, r5, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
                          (1 << SharedFunctionInfo::kNativeBit)));
  __ bne(&done_convert);
  {
    // ----------- S t a t e -------------
    //  -- r2 : the number of arguments (not including the receiver)
    //  -- r3 : the function to call (checked to be a JSFunction)
    //  -- r4 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r5);
    } else {
      Label convert_to_object, convert_receiver;
      __ ShiftLeftP(r5, r2, Operand(kPointerSizeLog2));
      __ LoadP(r5, MemOperand(sp, r5));
      __ JumpIfSmi(r5, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(r5, r6, r6, FIRST_JS_RECEIVER_TYPE);
      __ bge(&done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(r5, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(r5, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(r5);
        }
        __ b(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the
        // frame in the fast case? (fall back to AllocateInNewSpace?)
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(r2);
        __ Push(r2, r3);
        __ LoadRR(r2, r5);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ LoadRR(r5, r2);
        __ Pop(r2, r3);
        __ SmiUntag(r2);
      }
      __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ ShiftLeftP(r6, r2, Operand(kPointerSizeLog2));
    __ StoreP(r5, MemOperand(sp, r6));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the function to call (checked to be a JSFunction)
  //  -- r4 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r2, r5, r6, r7);
  }

  __ LoadW(
      r4, FieldMemOperand(r4, SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_S390X
  __ SmiUntag(r4);
#endif
  ParameterCount actual(r2);
  ParameterCount expected(r4);
  __ InvokeFunctionCode(r3, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
    __ push(r3);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
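
// The receiver conversion above is, roughly, ES6 OrdinaryCallBindThis: for a
// non-strict, non-native callee,
//
//   receiver = (thisArgument === null || thisArgument === undefined)
//                  ? globalProxy
//                  : ToObject(thisArgument);
//
// Strict-mode and native functions receive thisArgument unchanged.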

namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : target (checked to be a JSBoundFunction)
  //  -- r5 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into r4 and length of that into r6.
  Label no_bound_arguments;
  __ LoadP(r4, FieldMemOperand(r3, JSBoundFunction::kBoundArgumentsOffset));
  __ LoadP(r6, FieldMemOperand(r4, FixedArray::kLengthOffset));
  __ SmiUntag(r6);
  __ LoadAndTestP(r6, r6);
  __ beq(&no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- r2 : the number of arguments (not including the receiver)
    //  -- r3 : target (checked to be a JSBoundFunction)
    //  -- r4 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- r5 : new.target (only in case of [[Construct]])
    //  -- r6 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ LoadRR(r8, sp);  // preserve previous stack pointer
      __ ShiftLeftP(r9, r6, Operand(kPointerSizeLog2));
      __ SubP(sp, sp, r9);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
      __ bgt(&done);  // Signed comparison.
      // Restore the stack pointer.
      __ LoadRR(sp, r8);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Relocate arguments down the stack.
    //  -- r2 : the number of arguments (not including the receiver)
    //  -- r8 : the previous stack pointer
    //  -- r9 : the size of the [[BoundArguments]]
    {
      Label skip, loop;
      __ LoadImmP(r7, Operand::Zero());
      __ CmpP(r2, Operand::Zero());
      __ beq(&skip);
      __ LoadRR(r1, r2);
      __ bind(&loop);
      __ LoadP(r0, MemOperand(r8, r7));
      __ StoreP(r0, MemOperand(sp, r7));
      __ AddP(r7, r7, Operand(kPointerSize));
      __ BranchOnCount(r1, &loop);
      __ bind(&skip);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ AddP(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ AddP(r4, r4, r9);
      __ LoadRR(r1, r6);
      __ bind(&loop);
      __ LoadP(r0, MemOperand(r4, -kPointerSize));
      __ lay(r4, MemOperand(r4, -kPointerSize));
      __ StoreP(r0, MemOperand(sp, r7));
      __ AddP(r7, r7, Operand(kPointerSize));
      __ BranchOnCount(r1, &loop);
      __ AddP(r2, r2, r6);
    }
  }
  __ bind(&no_bound_arguments);
}

}  // namespace
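
// Effect of Generate_PushBoundArguments, in JS terms: given
//
//   const bf = f.bind(b, x, y);
//
// a call bf(z) must run f with receiver b and arguments (x, y, z). The
// [[BoundArguments]] (x, y) are therefore spliced onto the stack below the
// caller-supplied arguments, and the argument count in r2 is bumped by the
// number of bound arguments.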

// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(r3);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r2, r5, r6, r7);
  }

  // Patch the receiver to [[BoundThis]].
  __ LoadP(ip, FieldMemOperand(r3, JSBoundFunction::kBoundThisOffset));
  __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2));
  __ StoreP(ip, MemOperand(sp, r1));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ LoadP(r3,
           FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                       masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(r3, &non_callable);
  __ bind(&non_smi);
  __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
  __ TestBit(r6, Map::kIsCallable);
  __ beq(&non_callable);

  __ CmpP(r7, Operand(JS_PROXY_TYPE));
  __ bne(&non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r2, r5, r6, r7);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(r3);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ AddP(r2, r2, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2));
  __ StoreP(r3, MemOperand(sp, r7));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r3);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r3);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
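
// Dispatch summary for Generate_Call (target in r3):
//   JSFunction       -> CallFunction (receiver conversion, then invoke)
//   JSBoundFunction  -> CallBoundFunction (patches receiver to [[BoundThis]])
//   JSProxy          -> Runtime::kJSProxyCall ([[Call]] trap in the runtime)
//   other callables  -> call_as_function_delegate via CallFunction
//   non-callables    -> Runtime::kThrowCalledNonCallable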

// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the constructor to call (checked to be a JSFunction)
  //  -- r5 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(r3);

  // The calling convention for function-specific ConstructStubs requires
  // r4 to contain either an AllocationSite or undefined.
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kConstructStubOffset));
  __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the function to call (checked to be a JSBoundFunction)
  //  -- r5 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(r3);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  Label skip;
  __ CmpP(r3, r5);
  __ bne(&skip);
  __ LoadP(r5,
           FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
  __ bind(&skip);

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ LoadP(r3,
           FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the constructor to call (checked to be a JSProxy)
  //  -- r5 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(r3, r5);
  // Include the pushed new_target, constructor and the receiver.
  __ AddP(r2, r2, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r3 : the constructor to call (can be any Object)
  //  -- r5 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(r3, &non_constructor);

  // Dispatch based on instance type.
  __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  __ LoadlB(r4, FieldMemOperand(r6, Map::kBitFieldOffset));
  __ TestBit(r4, Map::kIsConstructor);
  __ beq(&non_constructor);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ CmpP(r7, Operand(JS_PROXY_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2));
    __ StoreP(r3, MemOperand(sp, r7));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r3);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
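
// Dispatch summary for Generate_Construct (target in r3):
//   JSFunction         -> ConstructFunction (per-function construct stub)
//   JSBoundFunction    -> ConstructBoundFunction (unwraps the bound target)
//   JSProxy            -> ConstructProxy ([[Construct]] trap in the runtime)
//   other constructors -> call_as_constructor_delegate via CallFunction
//   non-constructors   -> ConstructedNonConstructable (throws a TypeError)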

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(r3);
  __ Push(r3);
  __ LoadSmiLiteral(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(r3);
  __ LoadSmiLiteral(r4, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(r3, r4);
  __ LoadSmiLiteral(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : actual number of arguments
  //  -- r3 : function (passed through to callee)
  //  -- r4 : expected number of arguments
  //  -- r5 : new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeEntryOffset));
  __ CmpP(r2, r4);
  __ blt(&too_few);
  __ CmpP(r4, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ beq(&dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r2 and copy end address into r6.
    // r2: actual number of arguments as a smi
    // r3: function
    // r4: expected number of arguments
    // r5: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r2, r2);
    __ AddP(r2, fp);
    // Adjust for return address and receiver.
    __ AddP(r2, r2, Operand(2 * kPointerSize));
    __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
    __ SubP(r6, r2, r6);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r2: copy start address
    // r3: function
    // r4: expected number of arguments
    // r5: new target (passed through to callee)
    // r6: copy end address
    // ip: code entry to call

    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r2, 0));
    __ push(r0);
    __ CmpP(r2, r6);  // Compare before moving to next argument.
    __ lay(r2, MemOperand(r2, -kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }

  {  // Too few parameters: actual < expected
    __ bind(&too_few);

    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r2; copy end address is fp.
    // r2: actual number of arguments as a smi
    // r3: function
    // r4: expected number of arguments
    // r5: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r2, r2);
    __ lay(r2, MemOperand(r2, fp));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r2: copy start address
    // r3: function
    // r4: expected number of arguments
    // r5: new target (passed through to callee)
    // ip: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r2, 2 * kPointerSize));
    __ push(r0);
    __ CmpP(r2, fp);  // Compare before moving to next argument.
    __ lay(r2, MemOperand(r2, -kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    // r3: function
    // r4: expected number of arguments
    // ip: code entry to call
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
    __ SubP(r6, fp, r6);
    // Adjust for frame.
    __ SubP(r6, r6, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ CmpP(sp, r6);
    __ bne(&fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ LoadRR(r2, r4);
  // r2 : expected number of arguments
  // r3 : function (passed through to callee)
  // r5 : new target (passed through to callee)
  __ CallJSEntry(ip);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ JumpToJSEntry(ip);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}
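
// Worked example: for `function f(a, b, c) {}` called as `f(1, 2)`, the
// adaptor takes the "too few" path with actual == 2 and expected == 3. It
// copies the receiver, 1 and 2 into the new frame, pushes one undefined, and
// invokes f's code with r2 == 3, so f observes a === 1, b === 2 and
// c === undefined.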

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_S390