// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments excluding receiver
  //  -- r4                 : target
  //  -- r6                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(r4);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // Insert extra arguments.
  int num_extra_args = 0;
  switch (extra_args) {
    case BuiltinExtraArguments::kTarget:
      __ Push(r4);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kNewTarget:
      __ Push(r6);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kTargetAndNewTarget:
      __ Push(r4, r6);
      num_extra_args += 2;
      break;
    case BuiltinExtraArguments::kNone:
      break;
  }

  // JumpToExternalReference expects r3 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addi(r3, r3, Operand(num_extra_args + 1));

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
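
// In effect, for a builtin declared with
// BuiltinExtraArguments::kTargetAndNewTarget the adaptor pushes the target
// and new.target on top of the user-visible arguments, so r3 is bumped by
// num_extra_args + 1 (the extras plus the receiver) before handing off to
// the C++ builtin via JumpToExternalReference.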


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function (i.e. tail call into its stub).
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ mr(r6, r4);
  // Run the native code for the Array function called as a normal function
  // (i.e. tail call into its stub).
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments
  //  -- lr                 : return address
  //  -- sp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- sp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Condition const cond_done = (kind == MathMaxMinKind::kMin) ? lt : gt;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r4 and the double value in d1.
  __ LoadRoot(r4, root_index);
  __ lfd(d1, FieldMemOperand(r4, HeapNumber::kValueOffset));

  // Set up state for the loop:
  // r5: address of arg[0] + kPointerSize
  // r6: number of slots to drop at exit (arguments + receiver)
  __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
  __ add(r5, sp, r5);
  __ addi(r6, r3, Operand(1));

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check whether all parameters have been processed.
    __ cmpl(r5, sp);
    __ ble(&done_loop);

    // Load the next parameter's tagged value into r3.
    __ LoadPU(r3, MemOperand(r5, -kPointerSize));

    // Load the double value of the parameter into d2, first converting the
    // parameter to a number with the ToNumberStub if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r3, &convert_smi);
    __ LoadP(r7, FieldMemOperand(r3, HeapObject::kMapOffset));
    __ JumpIfRoot(r7, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // The parameter is not a Number; use the ToNumberStub to convert it.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r6);
      __ Push(r4, r5, r6);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Pop(r4, r5, r6);
      __ SmiUntag(r6);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
        __ SmiToDouble(d1, r4);
        __ JumpIfSmi(r4, &done_restore);
        __ lfd(d1, FieldMemOperand(r4, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ lfd(d2, FieldMemOperand(r3, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r3);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left
    // hand side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ fcmpu(d1, d2);
    __ bunordered(&compare_nan);
    __ b(cond_done, &loop);
    __ b(CommuteCondition(cond_done), &compare_swap);

    // Left and right hand side are equal, check for -0 vs. +0.
    __ TestDoubleIsMinusZero(reg, r7, r8);
    __ bne(&loop);

    // Update the accumulator. The result is on the right hand side.
    __ bind(&compare_swap);
    __ fmr(d1, d2);
    __ mr(r4, r3);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    // We still need to visit the rest of the arguments.
    __ bind(&compare_nan);
    __ LoadRoot(r4, Heap::kNanValueRootIndex);
    __ lfd(d1, FieldMemOperand(r4, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  __ mr(r3, r4);
  __ Drop(r6);
  __ Ret();
}
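
// For example, Math.max(1, NaN, 3) starts the accumulator at -Infinity;
// fcmpu against the NaN argument takes the "unordered" branch, the
// accumulator is reloaded with the canonical NaN, and every remaining
// comparison stays unordered, so the builtin correctly returns NaN.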
238
239// static
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000240void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400241 // ----------- S t a t e -------------
242 // -- r3 : number of arguments
243 // -- r4 : constructor function
244 // -- lr : return address
245 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
246 // -- sp[argc * 4] : receiver
247 // -----------------------------------
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400248
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000249 // 1. Load the first argument into r3 and get rid of the rest (including the
250 // receiver).
251 Label no_arguments;
252 {
253 __ cmpi(r3, Operand::Zero());
254 __ beq(&no_arguments);
255 __ subi(r3, r3, Operand(1));
256 __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
257 __ LoadPUX(r3, MemOperand(sp, r3));
258 __ Drop(2);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400259 }
260
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000261 // 2a. Convert the first argument to a number.
262 ToNumberStub stub(masm->isolate());
263 __ TailCallStub(&stub);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400264
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000265 // 2b. No arguments, return +0.
266 __ bind(&no_arguments);
267 __ LoadSmiLiteral(r3, Smi::FromInt(0));
268 __ Ret(1);
269}
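
// This is Number called as a plain function: Number("42") tail-calls into
// ToNumberStub and returns the number 42, while Number() with no arguments
// returns the +0 smi materialized above.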


// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- r6                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // 2. Load the first argument into r5 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
    __ LoadPUX(r5, MemOperand(sp, r5));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadSmiLiteral(r5, Smi::FromInt(0));
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r5 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r5, &done_convert);
    __ CompareObjectType(r5, r7, r7, HEAP_NUMBER_TYPE);
    __ beq(&done_convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r4, r6);
      __ mr(r3, r5);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mr(r5, r3);
      __ Pop(r4, r6);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(r4, r6);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create the new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r5);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r5);
  }
  __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0);
  __ Ret();
}
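
// This is the 'new Number(x)' path: when new.target equals the Number
// function itself the JSValue wrapper is allocated inline, and otherwise
// (a subclass constructor, or if inline allocation bails out to the
// new_object label) FastNewObjectStub creates the object; either way the
// primitive value is stored into JSValue::kValueOffset afterwards.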


// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r3 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
    __ LoadPUX(r3, MemOperand(sp, r3));
    __ Drop(2);
  }

  // 2a. At least one argument, return r3 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r3, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(r3, r4, r4, FIRST_NONSTRING_TYPE);
    __ bgt(&to_string);
    __ beq(&symbol_descriptive_string);
    __ Ret();
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r3, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r3 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in r3 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(r3);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}
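
// Note that String(Symbol()) deliberately succeeds here through
// Runtime::kSymbolDescriptiveString (yielding e.g. "Symbol()"), whereas an
// implicit ToString of a symbol throws; only the explicit String() call
// reaches this descriptive-string path.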


// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- r6                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // 2. Load the first argument into r5 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
    __ LoadPUX(r5, MemOperand(sp, r5));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r5, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r5 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r5, &convert);
    __ CompareObjectType(r5, r7, r7, FIRST_NONSTRING_TYPE);
    __ blt(&done_convert);
    __ bind(&convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(r4, r6);
      __ mr(r3, r5);
      __ CallStub(&stub);
      __ mr(r5, r3);
      __ Pop(r4, r6);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(r4, r6);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create the new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r5);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r5);
  }
  __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0);
  __ Ret();
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (preserved for callee)
  //  -- r4 : target function (preserved for callee)
  //  -- r6 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(r3);
    __ Push(r3, r4, r6, r4);

    __ CallRuntime(function_id, 1);
    __ mr(r5, r3);

    // Restore target function and new target.
    __ Pop(r3, r4, r6);
    __ SmiUntag(r3);
  }
  __ addi(ip, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmpl(sp, ip);
  __ bge(&ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- r4     : constructor function
  //  -- r5     : allocation site or undefined
  //  -- r6     : new target
  //  -- cp     : context
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r5, r7);

    if (!create_implicit_receiver) {
      __ SmiTag(r7, r3, SetRC);
      __ Push(cp, r5, r7);
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    } else {
      __ SmiTag(r3);
      __ Push(cp, r5, r3);

      // Allocate the new receiver object.
      __ Push(r4, r6);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mr(r7, r3);
      __ Pop(r4, r6);

      // ----------- S t a t e -------------
      //  -- r4: constructor function
      //  -- r6: new target
      //  -- r7: newly allocated object
      // -----------------------------------

      // Retrieve the smi-tagged arguments count from the stack.
      __ LoadP(r3, MemOperand(sp));
      __ SmiUntag(r3, SetRC);

      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(r7, r7);
    }

    // Set up pointer to last argument.
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r3: number of arguments
    // r4: constructor function
    // r5: address of last argument (caller sp)
    // r6: new target
    // cr0: condition indicating whether r3 is zero
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ beq(&no_args, cr0);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ sub(sp, sp, ip);
    __ mtctr(r3);
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r5, ip));
    __ StorePX(r0, MemOperand(sp, ip));
    __ bdnz(&loop);
    __ bind(&no_args);

    // Call the function.
    // r3: number of arguments
    // r4: constructor function
    // r6: new target

    ParameterCount actual(r3);
    __ InvokeFunction(r4, r6, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r3: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r3: result
      // sp[0]: receiver
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(r3, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r3, r4, r6, FIRST_JS_RECEIVER_TYPE);
      __ bge(&exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ LoadP(r3, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r3: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ LoadP(r4, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, which would indicate that the
  // constructor result from a derived class is neither undefined nor an
  // Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r3, &dont_throw);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5);
  }
  __ blr();
}
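
// In the epilogue above, r4 holds the smi-tagged argument count taken from
// the frame: SmiToPtrArrayOffset turns it into a byte offset, the add drops
// the arguments, and the extra kPointerSize drops the receiver slot.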


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3    : the value to pass to the generator
  //  -- r4    : the JSGeneratorObject to resume
  //  -- r5    : the resume mode (tagged)
  //  -- lr    : return address
  // -----------------------------------
  __ AssertGeneratorObject(r4);

  // Store input value into generator object.
  __ StoreP(r3, FieldMemOperand(r4, JSGeneratorObject::kInputOffset), r0);
  __ RecordWriteField(r4, JSGeneratorObject::kInputOffset, r3, r6,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kResumeModeOffset), r0);

  // Load suspended function and context.
  __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset));
  __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(masm->isolate());
  __ mov(ip, Operand(step_in_enabled));
  __ lbz(ip, MemOperand(ip));
  __ cmpi(ip, Operand::Zero());
  __ beq(&skip_flooding);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4, r5, r7);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(r4, r5);
    __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
  }
  __ bind(&skip_flooding);

  // Push receiver.
  __ LoadP(ip, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
  __ Push(ip);

  // ----------- S t a t e -------------
  //  -- r4    : the JSGeneratorObject to resume
  //  -- r5    : the resume mode (tagged)
  //  -- r7    : generator function
  //  -- cp    : generator context
  //  -- lr    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
  __ LoadWordArith(
      r3, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label loop, done_loop;
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
#if V8_TARGET_ARCH_PPC64
    __ cmpi(r3, Operand::Zero());
    __ beq(&done_loop);
#else
    __ SmiUntag(r3, SetRC);
    __ beq(&done_loop, cr0);
#endif
    __ mtctr(r3);
    __ bind(&loop);
    __ push(ip);
    __ bdnz(&loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(r6, r6, r6, BYTECODE_ARRAY_TYPE);
  __ bne(&old_generator);

  // New-style (ignition/turbofan) generator object.
  {
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ mr(r6, r4);
    __ mr(r4, r7);
    __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
    __ JumpToJSEntry(ip);
  }

  // Old-style (full-codegen) generator object.
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PushStandardFrame(r7);

    // Restore the operand stack.
    __ LoadP(r3, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset));
    __ LoadP(r6, FieldMemOperand(r3, FixedArray::kLengthOffset));
    __ addi(r3, r3,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    {
      Label loop, done_loop;
      __ SmiUntag(r6, SetRC);
      __ beq(&done_loop, cr0);
      __ mtctr(r6);
      __ bind(&loop);
      __ LoadPU(ip, MemOperand(r3, kPointerSize));
      __ Push(ip);
      __ bdnz(&loop);
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(ip, Heap::kEmptyFixedArrayRootIndex);
    __ StoreP(ip, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset),
              r0);

    // Resume the generator function at the continuation.
    __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kCodeOffset));
    __ addi(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
    {
      ConstantPoolUnavailableScope constant_pool_unavailable(masm);
      if (FLAG_enable_embedded_constant_pool) {
        __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r6);
      }
      __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
      __ SmiUntag(r5);
      __ add(r6, r6, r5);
      __ LoadSmiLiteral(r5,
                        Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
      __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset),
                r0);
      __ mr(r3, r4);  // Continuation expects generator object in r3.
      __ Jump(r6);
    }
  }
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ push(r4);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}


enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers r5; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here, which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiToPtrArrayOffset(r0, argc);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2));
  }
  __ cmp(r5, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
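
// The check above computes sp - real_stack_limit and compares it (signed)
// against argc * kPointerSize, i.e. "is there room for all argument slots?".
// If the stack has already overflowed, the subtraction goes negative and the
// signed bgt fails, falling through to Runtime::kThrowStackOverflow.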


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r3: new.target
  // r4: function
  // r5: receiver
  // r6: argc
  // r7: argv
  // r0, r8-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ LoadP(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r4, r5);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r5.
    Generate_CheckStackOverflow(masm, r6, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // r4: function
    // r6: argc
    // r7: argv, i.e. points to first arg
    Label loop, entry;
    __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
    __ add(r5, r7, r0);
    // r5 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ LoadP(r8, MemOperand(r7));  // read next parameter
    __ addi(r7, r7, Operand(kPointerSize));
    __ LoadP(r0, MemOperand(r8));  // dereference handle
    __ push(r0);                   // push parameter
    __ bind(&entry);
    __ cmp(r7, r5);
    __ bne(&loop);

    // Set up new.target and argc.
    __ mr(r7, r3);
    __ mr(r3, r6);
    __ mr(r6, r7);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
    __ mr(r14, r7);
    __ mr(r15, r7);
    __ mr(r16, r7);
    __ mr(r17, r7);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ blr();

  // r3: result
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r4: the JS function object being called.
//   o r6: the new target
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(r4);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ LoadP(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  Label array_done;
  Register debug_info = r5;
  DCHECK(!debug_info.is(r3));
  __ LoadP(debug_info,
           FieldMemOperand(r3, SharedFunctionInfo::kDebugInfoOffset));
  // Load original bytecode array or the debug copy.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpSmiLiteral(debug_info, DebugInfo::uninitialized(), r0);
  __ beq(&array_done);
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ bind(&array_done);

  // Check whether the function data field actually holds a BytecodeArray.
  Label bytecode_array_not_present;
  __ CompareRoot(kInterpreterBytecodeArrayRegister,
                 Heap::kUndefinedValueRootIndex);
  __ beq(&bytecode_array_not_present);

  if (FLAG_debug_code) {
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Load the initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, the bytecode array and the Smi-tagged bytecode offset.
  __ SmiTag(r3, kInterpreterBytecodeOffsetRegister);
  __ Push(r6, kInterpreterBytecodeArrayRegister, r3);

  // Allocate the local and temporary register file on the stack.
  {
    // Load the frame size (in words) from the BytecodeArray object.
    __ lwz(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r6, sp, r5);
    __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
    __ cmpl(r6, r0);
    __ bge(&ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop, no_args;
    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2), SetRC);
    __ beq(&no_args, cr0);
    __ mtctr(r5);
    __ bind(&loop);
    __ push(r6);
    __ bdnz(&loop);
    __ bind(&no_args);
  }

  // Load the accumulator and the dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Call(ip);

  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(
      masm->pc_offset());

  // The return value is in r3.

  // Get the arguments + receiver count.
  __ LoadP(r5, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lwz(r5, FieldMemOperand(r5, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  __ add(sp, sp, r5);
  __ blr();

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ bind(&bytecode_array_not_present);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kCodeOffset));
  __ addi(r7, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(r7, FieldMemOperand(r4, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(r4, r7, r8);
  __ JumpToJSEntry(r7);
}


static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
                                         Register count, Register scratch) {
  Label loop;
  __ addi(index, index, Operand(kPointerSize));  // Bias up for LoadPU
  __ mtctr(count);
  __ bind(&loop);
  __ LoadPU(scratch, MemOperand(index, -kPointerSize));
  __ push(scratch);
  __ bdnz(&loop);
}
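
// LoadPU is a load-with-update: the index register is first biased up by
// one slot so that the pre-decrement in the loop lands on the first
// argument, and each subsequent iteration steps back one pointer-sized
// slot, pushing `count` consecutive argument slots onto the stack.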

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r5 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r4 : the target to call (can be any Object).
  // -----------------------------------

  // Calculate number of arguments (add one for receiver).
  __ addi(r6, r3, Operand(1));

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r5, r6, r7);

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (not including receiver)
  //  -- r6 : new target
  //  -- r4 : constructor to call
  //  -- r5 : address of the first argument
  // -----------------------------------

  // Push a slot for the receiver to be constructed.
  __ li(r0, Operand::Zero());
  __ push(r0);

  // Push the arguments (skip if none).
  Label skip;
  __ cmpi(r3, Operand::Zero());
  __ beq(&skip);
  Generate_InterpreterPushArgs(masm, r5, r3, r7);
  __ bind(&skip);

  // Call the constructor with r3, r4, and r6 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ Move(r5, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ addi(r0, r5, Operand(interpreter_entry_return_pc_offset->value() +
                          Code::kHeaderSize - kHeapObjectTag));
  __ mtlr(r0);

  // Initialize the dispatch table register.
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r4, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Jump(ip);
}
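
// The dispatch sequence is effectively:
//   handler = dispatch_table[bytecode_array[bytecode_offset]];
//   goto handler;
// with the byte loaded via lbzx and scaled by the pointer size to index the
// table of handler code entry points.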

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (preserved for callee)
  //  -- r6 : new target (preserved for callee)
  //  -- r4 : target function (preserved for callee)
  // -----------------------------------
  // First look up the code; maybe we don't need to compile!
  Label gotta_call_runtime;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = r4;
  Register map = r9;
  Register index = r5;
  __ LoadP(map,
           FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(map,
           FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ LoadP(index, FieldMemOperand(map, FixedArray::kLengthOffset));
  __ CmpSmiLiteral(index, Smi::FromInt(2), r0);
  __ blt(&gotta_call_runtime);

  // Find literals.
  // r10 : native context
  // r5  : length / index
  // r9  : optimized code map
  // r6  : new target
  // r4  : closure
  Register native_context = r10;
  __ LoadP(native_context, NativeContextMemOperand());

  __ bind(&loop_top);
  Register temp = r11;
  Register array_pointer = r8;

  // Does the native context match?
  __ SmiToPtrArrayOffset(array_pointer, index);
  __ add(array_pointer, map, array_pointer);
  __ LoadP(temp, FieldMemOperand(array_pointer,
                                 SharedFunctionInfo::kOffsetToPreviousContext));
  __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ cmp(temp, native_context);
  __ bne(&loop_bottom);
  // OSR id set to none?
  __ LoadP(temp,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ CmpSmiLiteral(temp, Smi::FromInt(bailout_id), r0);
  __ bne(&loop_bottom);
  // Literals available?
  __ LoadP(temp,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousLiterals));
  __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(temp, &gotta_call_runtime);

  // Save the literals in the closure.
  __ StoreP(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset), r0);
  __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r7,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Code available?
  Register entry = r7;
  __ LoadP(entry,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &maybe_call_runtime);

  // Found literals and code. Get them into the closure and return.
  // Store the code entry in the closure.
  __ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));

  Label install_optimized_code_and_tailcall;
  __ bind(&install_optimized_code_and_tailcall);
  __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(closure, entry, r8);

  // Link the closure into the optimized function list.
  // r7 : code entry
  // r10: native context
  // r4 : closure
  __ LoadP(
      r8, ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ StoreP(r8, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset),
            r0);
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r8, temp,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ StoreP(
      closure,
      ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST), r0);
  // Save closure before the write barrier.
  __ mr(r8, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, r8, temp,
                            kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ JumpToJSEntry(entry);

  __ bind(&loop_bottom);
  __ SubSmiLiteral(index, index, Smi::FromInt(SharedFunctionInfo::kEntryLength),
                   r0);
  __ CmpSmiLiteral(index, Smi::FromInt(1), r0);
  __ bgt(&loop_top);

  // We found neither literals nor code.
  __ b(&gotta_call_runtime);

  __ bind(&maybe_call_runtime);

  // Last possibility: check the context-free optimized code map entry.
  __ LoadP(entry,
           FieldMemOperand(map, FixedArray::kHeaderSize +
                                    SharedFunctionInfo::kSharedCodeIndex));
  __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Store the code entry in the closure.
  __ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ b(&install_optimized_code_and_tailcall);

  __ bind(&try_shared);
  // Is the full code valid?
  __ LoadP(entry,
           FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ lwz(r8, FieldMemOperand(entry, Code::kFlagsOffset));
  __ DecodeField<Code::KindField>(r8);
  __ cmpi(r8, Operand(Code::BUILTIN));
  __ beq(&gotta_call_runtime);
  // Yes, install the full code.
  __ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(closure, entry, r8);
  __ JumpToJSEntry(entry);

  __ bind(&gotta_call_runtime);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
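
// The fast path above scans the SharedFunctionInfo's optimized code map
// from the last entry downwards for an entry whose native context matches,
// whose OSR id is none, and whose literals and code weak cells are still
// alive; only when every candidate fails (and the shared code is just a
// builtin) does it tail-call into Runtime::kCompileLazy.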

void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
1363
1364
1365static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1366 // For now, we are relying on the fact that make_code_young doesn't do any
1367 // garbage collection, which allows us to save/restore the registers without
1368 // worrying about which of them contain pointers. We also don't build an
1369 // internal frame to make the code faster, since we shouldn't have to do stack
1370 // crawls in MakeCodeYoung. This seems a bit fragile.
1371
1372 // Point r3 at the start of the PlatformCodeAge sequence.
1373 __ mr(r3, ip);
1374
1375 // The following registers must be saved and restored when calling through to
1376 // the runtime:
1377 // r3 - contains return address (beginning of patch sequence)
1378 // r4 - isolate
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001379 // r6 - new target
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001380 // lr - return address
1381 FrameScope scope(masm, StackFrame::MANUAL);
1382 __ mflr(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001383 __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001384 __ PrepareCallCFunction(2, 0, r5);
1385 __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
1386 __ CallCFunction(
1387 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001388 __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001389 __ mtlr(r0);
1390 __ mr(ip, r3);
1391 __ Jump(ip);
1392}
1393
1394#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
1395 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
1396 MacroAssembler* masm) { \
1397 GenerateMakeCodeYoungAgainCommon(masm); \
1398 } \
1399 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
1400 MacroAssembler* masm) { \
1401 GenerateMakeCodeYoungAgainCommon(masm); \
1402 }
1403CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1404#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
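// The macro above expands once per code age in CODE_AGE_LIST, emitting an
// "even marking" and an "odd marking" builtin for each age; all of the
// generated bodies are identical and simply defer to
// GenerateMakeCodeYoungAgainCommon.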
1405
1406
1407void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1408 // For now, we are relying on the fact that make_code_young doesn't do any
1409 // garbage collection, which allows us to save/restore the registers without
1410 // worrying about which of them contain pointers. We also don't build an
1411 // internal frame to make the code faster, since we shouldn't have to do stack
1412 // crawls in MakeCodeYoung. This seems a bit fragile.
1413
1414 // Point r3 at the start of the PlatformCodeAge sequence.
1415 __ mr(r3, ip);
1416
1417 // The following registers must be saved and restored when calling through to
1418 // the runtime:
1419 // r3 - contains return address (beginning of patch sequence)
1420 // r4 - isolate
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001421 // r6 - new target
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001422 // lr - return address
1423 FrameScope scope(masm, StackFrame::MANUAL);
1424 __ mflr(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001425 __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001426 __ PrepareCallCFunction(2, 0, r5);
1427 __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
1428 __ CallCFunction(
1429 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
1430 2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001431 __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001432 __ mtlr(r0);
1433 __ mr(ip, r3);
1434
1435 // Perform prologue operations usually performed by the young code stub.
Ben Murdochda12d292016-06-02 14:46:10 +01001436 __ PushStandardFrame(r4);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001437
1438 // Jump to point after the code-age stub.
1439 __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength));
1440 __ Jump(r3);
1441}
1442
1443
1444void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1445 GenerateMakeCodeYoungAgainCommon(masm);
1446}
1447
1448
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001449void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1450 Generate_MarkCodeAsExecutedOnce(masm);
1451}
1452
1453
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001454static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1455 SaveFPRegsMode save_doubles) {
1456 {
1457 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1458
1459 // Preserve registers across notification; this is important for compiled
1460 // stubs that tail call the runtime on deopts passing their parameters in
1461 // registers.
1462 __ MultiPush(kJSCallerSaved | kCalleeSaved);
1463 // Pass the function and deoptimization type to the runtime system.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001464 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001465 __ MultiPop(kJSCallerSaved | kCalleeSaved);
1466 }
1467
1468 __ addi(sp, sp, Operand(kPointerSize)); // Ignore state
1469 __ blr(); // Jump to miss handler
1470}
1471
1472
1473void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1474 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1475}
1476
1477
1478void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1479 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1480}
1481
1482
1483static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1484 Deoptimizer::BailoutType type) {
1485 {
1486 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1487 // Pass the function and deoptimization type to the runtime system.
1488 __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
1489 __ push(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001490 __ CallRuntime(Runtime::kNotifyDeoptimized);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001491 }
1492
1493 // Get the full codegen state from the stack and untag it -> r9.
1494 __ LoadP(r9, MemOperand(sp, 0 * kPointerSize));
1495 __ SmiUntag(r9);
1496 // Switch on the state.
1497 Label with_tos_register, unknown_state;
Ben Murdochc5610432016-08-08 18:44:38 +01001498 __ cmpi(
1499 r9,
1500 Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::NO_REGISTERS)));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001501 __ bne(&with_tos_register);
1502 __ addi(sp, sp, Operand(1 * kPointerSize)); // Remove state.
1503 __ Ret();
1504
1505 __ bind(&with_tos_register);
Ben Murdochc5610432016-08-08 18:44:38 +01001506 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r3.code());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001507 __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
Ben Murdochc5610432016-08-08 18:44:38 +01001508 __ cmpi(
1509 r9,
1510 Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::TOS_REGISTER)));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001511 __ bne(&unknown_state);
1512 __ addi(sp, sp, Operand(2 * kPointerSize)); // Remove state.
1513 __ Ret();
1514
1515 __ bind(&unknown_state);
1516 __ stop("no cases left");
1517}
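// The state dispatch above is equivalent to the following sketch (names
// shortened; r3 doubles as the interpreter accumulator, see the DCHECK):
//
//   switch (state) {
//     case BailoutState::NO_REGISTERS:  // nothing to restore
//       sp += 1 * kPointerSize; return;
//     case BailoutState::TOS_REGISTER:  // reload top-of-stack value
//       r3 = sp[1]; sp += 2 * kPointerSize; return;
//     default:
//       stop("no cases left");
//   }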
1518
1519
1520void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1521 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1522}
1523
1524
1525void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1526 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1527}
1528
1529
1530void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1531 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1532}
1533
1534
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001535// Clobbers registers {r7, r8, r9, r10}.
1536void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
1537 Register function_template_info,
1538 Label* receiver_check_failed) {
1539 Register signature = r7;
1540 Register map = r8;
1541 Register constructor = r9;
1542 Register scratch = r10;
1543
1544 // If there is no signature, return the holder.
1545 __ LoadP(signature, FieldMemOperand(function_template_info,
1546 FunctionTemplateInfo::kSignatureOffset));
1547 Label receiver_check_passed;
1548 __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
1549 &receiver_check_passed);
1550
1551 // Walk the prototype chain.
1552 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1553 Label prototype_loop_start;
1554 __ bind(&prototype_loop_start);
1555
1556 // Get the constructor, if any.
1557 __ GetMapConstructor(constructor, map, scratch, scratch);
1558 __ cmpi(scratch, Operand(JS_FUNCTION_TYPE));
1559 Label next_prototype;
1560 __ bne(&next_prototype);
1561 Register type = constructor;
1562 __ LoadP(type,
1563 FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
1564 __ LoadP(type,
1565 FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
1566
1567 // Loop through the chain of inheriting function templates.
1568 Label function_template_loop;
1569 __ bind(&function_template_loop);
1570
1571 // If the signatures match, we have a compatible receiver.
1572 __ cmp(signature, type);
1573 __ beq(&receiver_check_passed);
1574
1575 // If the current type is not a FunctionTemplateInfo, load the next prototype
1576 // in the chain.
1577 __ JumpIfSmi(type, &next_prototype);
1578 __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE);
1579 __ bne(&next_prototype);
1580
1581 // Otherwise load the parent function template and iterate.
1582 __ LoadP(type,
1583 FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
1584 __ b(&function_template_loop);
1585
1586 // Load the next prototype.
1587 __ bind(&next_prototype);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001588 __ lwz(scratch, FieldMemOperand(map, Map::kBitField3Offset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001589 __ DecodeField<Map::HasHiddenPrototype>(scratch, SetRC);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001590 __ beq(receiver_check_failed, cr0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001591
1592 __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
1593 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001594 // Iterate.
1595 __ b(&prototype_loop_start);
1596
1597 __ bind(&receiver_check_passed);
1598}
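// In effect (a summary of the walk above): starting from the receiver's map,
// each prototype's constructor is inspected; if its SharedFunctionInfo's
// function data leads to a FunctionTemplateInfo whose signature (or that of
// any parent template) matches the callee's signature, the receiver is
// compatible. A map without a hidden prototype ends the walk and fails the
// check.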
1599
1600
1601void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
1602 // ----------- S t a t e -------------
1603 // -- r3 : number of arguments excluding receiver
1604 // -- r4 : callee
1605 // -- lr : return address
1606 // -- sp[0] : last argument
1607 // -- ...
1608 // -- sp[4 * (argc - 1)] : first argument
1609 // -- sp[4 * argc] : receiver
1610 // -----------------------------------
1611
1613 // Load the FunctionTemplateInfo.
1614 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1615 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));
1616
1617 // Do the compatible receiver check.
1618 Label receiver_check_failed;
1619 __ ShiftLeftImm(r11, r3, Operand(kPointerSizeLog2));
1620 __ LoadPX(r5, MemOperand(sp, r11));
1621 CompatibleReceiverCheck(masm, r5, r6, &receiver_check_failed);
1622
1623 // Get the callback offset from the FunctionTemplateInfo, and jump to the
1624 // beginning of the code.
1625 __ LoadP(r7, FieldMemOperand(r6, FunctionTemplateInfo::kCallCodeOffset));
1626 __ LoadP(r7, FieldMemOperand(r7, CallHandlerInfo::kFastHandlerOffset));
1627 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
1628 __ JumpToJSEntry(ip);
1629
1630 // Compatible receiver check failed: throw an Illegal Invocation exception.
1631 __ bind(&receiver_check_failed);
1632 // Drop the arguments (including the receiver).
1633 __ addi(r11, r11, Operand(kPointerSize));
1634 __ add(sp, sp, r11);
1635 __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
1636}
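// Note: once the receiver passes CompatibleReceiverCheck above, execution
// jumps directly to the CallHandlerInfo's fast handler code object, so the
// generic API call path is bypassed entirely on this route.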
1637
1638
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001639void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1640 // Lookup the function in the JavaScript frame.
1641 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1642 {
1643 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1644 // Pass function as argument.
1645 __ push(r3);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001646 __ CallRuntime(Runtime::kCompileForOnStackReplacement);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001647 }
1648
1649 // If the code object is null, just return to the unoptimized code.
1650 Label skip;
1651 __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
1652 __ bne(&skip);
1653 __ Ret();
1654
1655 __ bind(&skip);
1656
1657 // Load deoptimization data from the code object.
1658 // <deopt_data> = <code>[#deoptimization_data_offset]
1659 __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));
1660
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001661 {
1662 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001663 __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start
1664
1665 if (FLAG_enable_embedded_constant_pool) {
1666 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
1667 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001668
1669 // Load the OSR entrypoint offset from the deoptimization data.
1670 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1671 __ LoadP(r4, FieldMemOperand(
1672 r4, FixedArray::OffsetOfElementAt(
1673 DeoptimizationInputData::kOsrPcOffsetIndex)));
1674 __ SmiUntag(r4);
1675
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001676 // Compute the target address = code start + osr_offset
1677 __ add(r0, r3, r4);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001678
1679 // And "return" to the OSR entry point of the function.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001680 __ mtlr(r0);
1681 __ blr();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001682 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001683}
1684
1685
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001686// static
1687void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
1688 int field_index) {
1689 // ----------- S t a t e -------------
1690 // -- lr : return address
1691 // -- sp[0] : receiver
1692 // -----------------------------------
1693
1694 // 1. Pop receiver into r3 and check that it's actually a JSDate object.
1695 Label receiver_not_date;
1696 {
1697 __ Pop(r3);
1698 __ JumpIfSmi(r3, &receiver_not_date);
1699 __ CompareObjectType(r3, r4, r5, JS_DATE_TYPE);
1700 __ bne(&receiver_not_date);
1701 }
1702
1703 // 2. Load the specified date field, falling back to the runtime as necessary.
1704 if (field_index == JSDate::kDateValue) {
1705 __ LoadP(r3, FieldMemOperand(r3, JSDate::kValueOffset));
1706 } else {
1707 if (field_index < JSDate::kFirstUncachedField) {
1708 Label stamp_mismatch;
1709 __ mov(r4, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
1710 __ LoadP(r4, MemOperand(r4));
1711 __ LoadP(ip, FieldMemOperand(r3, JSDate::kCacheStampOffset));
1712 __ cmp(r4, ip);
1713 __ bne(&stamp_mismatch);
1714 __ LoadP(r3, FieldMemOperand(
1715 r3, JSDate::kValueOffset + field_index * kPointerSize));
1716 __ Ret();
1717 __ bind(&stamp_mismatch);
1718 }
1719 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1720 __ PrepareCallCFunction(2, r4);
1721 __ LoadSmiLiteral(r4, Smi::FromInt(field_index));
1722 __ CallCFunction(
1723 ExternalReference::get_date_field_function(masm->isolate()), 2);
1724 }
1725 __ Ret();
1726
1727 // 3. Raise a TypeError if the receiver is not a date.
1728 __ bind(&receiver_not_date);
1729 __ TailCallRuntime(Runtime::kThrowNotDateError);
1730}
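// Note on the fast path above: a cached field is served straight from the
// JSDate object only while the isolate-wide date cache stamp still equals
// the stamp recorded in the JSDate; any mismatch (or an uncached field)
// falls through to the C++ helper behind get_date_field_function.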
1731
Ben Murdochda12d292016-06-02 14:46:10 +01001732// static
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001733void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1734 // ----------- S t a t e -------------
1735 // -- r3 : argc
1736 // -- sp[0] : argArray
1737 // -- sp[4] : thisArg
1738 // -- sp[8] : receiver
1739 // -----------------------------------
1740
1741 // 1. Load receiver into r4, argArray into r3 (if present), remove all
1742 // arguments from the stack (including the receiver), and push thisArg (if
1743 // present) instead.
1744 {
1745 Label skip;
1746 Register arg_size = r5;
1747 Register new_sp = r6;
1748 Register scratch = r7;
1749 __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
1750 __ add(new_sp, sp, arg_size);
1751 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1752 __ mr(scratch, r3);
1753 __ LoadP(r4, MemOperand(new_sp, 0)); // receiver
1754 __ cmpi(arg_size, Operand(kPointerSize));
1755 __ blt(&skip);
1756 __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize)); // thisArg
1757 __ beq(&skip);
1758 __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize)); // argArray
1759 __ bind(&skip);
1760 __ mr(sp, new_sp);
1761 __ StoreP(scratch, MemOperand(sp, 0));
1762 }
1763
1764 // ----------- S t a t e -------------
1765 // -- r3 : argArray
1766 // -- r4 : receiver
1767 // -- sp[0] : thisArg
1768 // -----------------------------------
1769
1770 // 2. Make sure the receiver is actually callable.
1771 Label receiver_not_callable;
1772 __ JumpIfSmi(r4, &receiver_not_callable);
1773 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
1774 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
1775 __ TestBit(r7, Map::kIsCallable, r0);
1776 __ beq(&receiver_not_callable, cr0);
1777
1778 // 3. Tail call with no arguments if argArray is null or undefined.
1779 Label no_arguments;
1780 __ JumpIfRoot(r3, Heap::kNullValueRootIndex, &no_arguments);
1781 __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &no_arguments);
1782
1783 // 4a. Apply the receiver to the given argArray (passing undefined for
1784 // new.target).
1785 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
1786 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1787
1788 // 4b. The argArray is either null or undefined, so we tail call without any
1789 // arguments to the receiver.
1790 __ bind(&no_arguments);
1791 {
1792 __ li(r3, Operand::Zero());
1793 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1794 }
1795
1796 // 4c. The receiver is not callable, throw an appropriate TypeError.
1797 __ bind(&receiver_not_callable);
1798 {
1799 __ StoreP(r4, MemOperand(sp, 0));
1800 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1801 }
1802}
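// JS-level behavior implemented above (an illustrative summary, not code):
// Function.prototype.apply extracts thisArg and argArray from the stack
// based purely on argc, tail-calls the Apply builtin with new.target set to
// undefined, short-circuits to a zero-argument Call when argArray is null
// or undefined, and throws a TypeError for a non-callable receiver.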
1803
1804
1805// static
1806void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001807 // 1. Make sure we have at least one argument.
1808 // r3: actual number of arguments
1809 {
1810 Label done;
1811 __ cmpi(r3, Operand::Zero());
1812 __ bne(&done);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001813 __ PushRoot(Heap::kUndefinedValueRootIndex);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001814 __ addi(r3, r3, Operand(1));
1815 __ bind(&done);
1816 }
1817
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001818 // 2. Get the callable to call (passed as receiver) from the stack.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001819 // r3: actual number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001820 __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
1821 __ LoadPX(r4, MemOperand(sp, r5));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001822
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001823 // 3. Shift arguments and return address one slot down on the stack
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001824 // (overwriting the original receiver). Adjust argument count to make
1825 // the original first argument the new receiver.
1826 // r3: actual number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001827 // r4: callable
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001828 {
1829 Label loop;
1830 // Calculate the copy start address (destination). Copy end address is sp.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001831 __ add(r5, sp, r5);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001832
1834 __ mtctr(r3);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001835 __ bind(&loop);
1836 __ LoadP(ip, MemOperand(r5, -kPointerSize));
1837 __ StoreP(ip, MemOperand(r5));
1838 __ subi(r5, r5, Operand(kPointerSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001839 __ bdnz(&loop);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001840 // Adjust the actual number of arguments and remove the top element
1841 // (which is a copy of the last argument).
1842 __ subi(r3, r3, Operand(1));
1843 __ pop();
1844 }
1845
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001846 // 4. Call the callable.
1847 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001848}
1849
1850
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001851void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1852 // ----------- S t a t e -------------
1853 // -- r3 : argc
1854 // -- sp[0] : argumentsList
1855 // -- sp[4] : thisArgument
1856 // -- sp[8] : target
1857 // -- sp[12] : receiver
1858 // -----------------------------------
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001859
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001860 // 1. Load target into r4 (if present), argumentsList into r3 (if present),
1861 // remove all arguments from the stack (including the receiver), and push
1862 // thisArgument (if present) instead.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001863 {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001864 Label skip;
1865 Register arg_size = r5;
1866 Register new_sp = r6;
1867 Register scratch = r7;
1868 __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
1869 __ add(new_sp, sp, arg_size);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001870 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001871 __ mr(scratch, r4);
1872 __ mr(r3, r4);
1873 __ cmpi(arg_size, Operand(kPointerSize));
1874 __ blt(&skip);
1875 __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize)); // target
1876 __ beq(&skip);
1877 __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize)); // thisArgument
1878 __ cmpi(arg_size, Operand(2 * kPointerSize));
1879 __ beq(&skip);
1880 __ LoadP(r3, MemOperand(new_sp, 3 * -kPointerSize)); // argumentsList
1881 __ bind(&skip);
1882 __ mr(sp, new_sp);
1883 __ StoreP(scratch, MemOperand(sp, 0));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001884 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001885
1886 // ----------- S t a t e -------------
1887 // -- r3 : argumentsList
1888 // -- r4 : target
1889 // -- sp[0] : thisArgument
1890 // -----------------------------------
1891
1892 // 2. Make sure the target is actually callable.
1893 Label target_not_callable;
1894 __ JumpIfSmi(r4, &target_not_callable);
1895 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
1896 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
1897 __ TestBit(r7, Map::kIsCallable, r0);
1898 __ beq(&target_not_callable, cr0);
1899
1900 // 3a. Apply the target to the given argumentsList (passing undefined for
1901 // new.target).
1902 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
1903 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1904
1905 // 3b. The target is not callable, throw an appropriate TypeError.
1906 __ bind(&target_not_callable);
1907 {
1908 __ StoreP(r4, MemOperand(sp, 0));
1909 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1910 }
1911}
1912
1913
1914void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1915 // ----------- S t a t e -------------
1916 // -- r3 : argc
1917 // -- sp[0] : new.target (optional)
1918 // -- sp[4] : argumentsList
1919 // -- sp[8] : target
1920 // -- sp[12] : receiver
1921 // -----------------------------------
1922
1923 // 1. Load target into r4 (if present), argumentsList into r3 (if present),
1924 // new.target into r6 (if present, otherwise use target), remove all
1925 // arguments from the stack (including the receiver), and push undefined as
1926 // the receiver instead.
1927 {
1928 Label skip;
1929 Register arg_size = r5;
1930 Register new_sp = r7;
1931 __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
1932 __ add(new_sp, sp, arg_size);
1933 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
1934 __ mr(r3, r4);
1935 __ mr(r6, r4);
1936 __ StoreP(r4, MemOperand(new_sp, 0)); // receiver (undefined)
1937 __ cmpi(arg_size, Operand(kPointerSize));
1938 __ blt(&skip);
1939 __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize)); // target
1940 __ mr(r6, r4); // new.target defaults to target
1941 __ beq(&skip);
1942 __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize)); // argumentsList
1943 __ cmpi(arg_size, Operand(2 * kPointerSize));
1944 __ beq(&skip);
1945 __ LoadP(r6, MemOperand(new_sp, 3 * -kPointerSize)); // new.target
1946 __ bind(&skip);
1947 __ mr(sp, new_sp);
1948 }
1949
1950 // ----------- S t a t e -------------
1951 // -- r3 : argumentsList
1952 // -- r6 : new.target
1953 // -- r4 : target
1954 // -- sp[0] : receiver (undefined)
1955 // -----------------------------------
1956
1957 // 2. Make sure the target is actually a constructor.
1958 Label target_not_constructor;
1959 __ JumpIfSmi(r4, &target_not_constructor);
1960 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
1961 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
1962 __ TestBit(r7, Map::kIsConstructor, r0);
1963 __ beq(&target_not_constructor, cr0);
1964
1965 // 3. Make sure the new.target is actually a constructor.
1966 Label new_target_not_constructor;
1967 __ JumpIfSmi(r6, &new_target_not_constructor);
1968 __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset));
1969 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
1970 __ TestBit(r7, Map::kIsConstructor, r0);
1971 __ beq(&new_target_not_constructor, cr0);
1972
1973 // 4a. Construct the target with the given new.target and argumentsList.
1974 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1975
1976 // 4b. The target is not a constructor, throw an appropriate TypeError.
1977 __ bind(&target_not_constructor);
1978 {
1979 __ StoreP(r4, MemOperand(sp, 0));
1980 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
1981 }
1982
1983 // 4c. The new.target is not a constructor, throw an appropriate TypeError.
1984 __ bind(&new_target_not_constructor);
1985 {
1986 __ StoreP(r6, MemOperand(sp, 0));
1987 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
1988 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001989}
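// Note on the argument decoding in the two Reflect builtins above: the
// optional operands are recovered purely from r3 (argc), which is why every
// load is guarded by comparing arg_size against 1, 2 or 3 pointer-sized
// slots before the stack is trimmed to new_sp.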
1990
1991
1992static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
1993 Label* stack_overflow) {
1994 // ----------- S t a t e -------------
1995 // -- r3 : actual number of arguments
1996 // -- r4 : function (passed through to callee)
1997 // -- r5 : expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001998 // -- r6 : new target (passed through to callee)
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001999 // -----------------------------------
2000 // Check the stack for overflow. We are not trying to catch
2001 // interruptions (e.g. debug break and preemption) here, so the "real stack
2002 // limit" is checked.
2003 __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
2004 // Make r8 the space we have left. The stack might already be overflowed
2005 // here which will cause r8 to become negative.
2006 __ sub(r8, sp, r8);
2007 // Check if the arguments will overflow the stack.
2008 __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
2009 __ cmp(r8, r0);
2010 __ ble(stack_overflow); // Signed comparison.
2011}
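// Worked example of the check above (a sketch, assuming a 32-bit target
// where kPointerSize == 4 and kPointerSizeLog2 == 2):
//
//   intptr_t space  = sp - real_stack_limit;   // r8, may be negative
//   intptr_t needed = expected_args << 2;      // r0
//   if (space <= needed) goto stack_overflow;  // ble: signed comparison
//
// so with r5 == 3 expected arguments the adaptor bails out whenever 12
// bytes or fewer remain above the real stack limit.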
2012
2013
2014static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
2015 __ SmiTag(r3);
2016 __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2017 __ mflr(r0);
2018 __ push(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002019 if (FLAG_enable_embedded_constant_pool) {
2020 __ Push(fp, kConstantPoolRegister, r7, r4, r3);
2021 } else {
2022 __ Push(fp, r7, r4, r3);
2023 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002024 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
2025 kPointerSize));
2026}
2027
2028
2029static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
2030 // ----------- S t a t e -------------
2031 // -- r3 : result being passed through
2032 // -----------------------------------
2033 // Get the number of arguments passed (as a smi), tear down the frame and
2034 // then drop the parameters from the stack.
2035 __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
2036 kPointerSize)));
2037 int stack_adjustment = kPointerSize; // adjust for receiver
2038 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
2039 __ SmiToPtrArrayOffset(r0, r4);
2040 __ add(sp, sp, r0);
2041}
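// Note: EnterArgumentsAdaptorFrame pushes the smi-tagged actual argument
// count as the last fixed slot of the frame, and LeaveArgumentsAdaptorFrame
// reads that same slot back (fp-relative) to drop the parameters and the
// receiver after the frame is torn down.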
2042
2043
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002044// static
2045void Builtins::Generate_Apply(MacroAssembler* masm) {
2046 // ----------- S t a t e -------------
2047 // -- r3 : argumentsList
2048 // -- r4 : target
2049 // -- r6 : new.target (checked to be constructor or undefined)
2050 // -- sp[0] : thisArgument
2051 // -----------------------------------
2052
2053 // Create the list of arguments from the array-like argumentsList.
2054 {
2055 Label create_arguments, create_array, create_runtime, done_create;
2056 __ JumpIfSmi(r3, &create_runtime);
2057
2058 // Load the map of argumentsList into r5.
2059 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
2060
2061 // Load native context into r7.
2062 __ LoadP(r7, NativeContextMemOperand());
2063
2064 // Check if argumentsList is an (unmodified) arguments object.
2065 __ LoadP(ip, ContextMemOperand(r7, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
2066 __ cmp(ip, r5);
2067 __ beq(&create_arguments);
2068 __ LoadP(ip, ContextMemOperand(r7, Context::STRICT_ARGUMENTS_MAP_INDEX));
2069 __ cmp(ip, r5);
2070 __ beq(&create_arguments);
2071
2072 // Check if argumentsList is a fast JSArray.
2073 __ CompareInstanceType(r5, ip, JS_ARRAY_TYPE);
2074 __ beq(&create_array);
2075
2076 // Ask the runtime to create the list (actually a FixedArray).
2077 __ bind(&create_runtime);
2078 {
2079 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2080 __ Push(r4, r6, r3);
2081 __ CallRuntime(Runtime::kCreateListFromArrayLike);
2082 __ Pop(r4, r6);
2083 __ LoadP(r5, FieldMemOperand(r3, FixedArray::kLengthOffset));
2084 __ SmiUntag(r5);
2085 }
2086 __ b(&done_create);
2087
2088 // Try to create the list from an arguments object.
2089 __ bind(&create_arguments);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002090 __ LoadP(r5, FieldMemOperand(r3, JSArgumentsObject::kLengthOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002091 __ LoadP(r7, FieldMemOperand(r3, JSObject::kElementsOffset));
2092 __ LoadP(ip, FieldMemOperand(r7, FixedArray::kLengthOffset));
2093 __ cmp(r5, ip);
2094 __ bne(&create_runtime);
2095 __ SmiUntag(r5);
2096 __ mr(r3, r7);
2097 __ b(&done_create);
2098
2099 // Try to create the list from a JSArray object.
2100 __ bind(&create_array);
2101 __ lbz(r5, FieldMemOperand(r5, Map::kBitField2Offset));
2102 __ DecodeField<Map::ElementsKindBits>(r5);
2103 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2104 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2105 STATIC_ASSERT(FAST_ELEMENTS == 2);
2106 __ cmpi(r5, Operand(FAST_ELEMENTS));
2107 __ bgt(&create_runtime);
2108 __ cmpi(r5, Operand(FAST_HOLEY_SMI_ELEMENTS));
2109 __ beq(&create_runtime);
2110 __ LoadP(r5, FieldMemOperand(r3, JSArray::kLengthOffset));
2111 __ LoadP(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
2112 __ SmiUntag(r5);
2113
2114 __ bind(&done_create);
2115 }
2116
2117 // Check for stack overflow.
2118 {
2119 // Check the stack for overflow. We are not trying to catch interruptions
2120 // (e.g. debug break and preemption) here, so check the "real stack limit".
2121 Label done;
2122 __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
2123 // Make ip the space we have left. The stack might already be overflowed
2124 // here which will cause ip to become negative.
2125 __ sub(ip, sp, ip);
2126 // Check if the arguments will overflow the stack.
2127 __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
2128 __ cmp(ip, r0); // Signed comparison.
2129 __ bgt(&done);
2130 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2131 __ bind(&done);
2132 }
2133
2134 // ----------- S t a t e -------------
2135 // -- r4 : target
2136 // -- r3 : args (a FixedArray built from argumentsList)
2137 // -- r5 : len (number of elements to push from args)
2138 // -- r6 : new.target (checked to be constructor or undefined)
2139 // -- sp[0] : thisArgument
2140 // -----------------------------------
2141
2142 // Push arguments onto the stack (thisArgument is already on the stack).
2143 {
2144 Label loop, no_args;
2145 __ cmpi(r5, Operand::Zero());
2146 __ beq(&no_args);
2147 __ addi(r3, r3,
2148 Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
2149 __ mtctr(r5);
2150 __ bind(&loop);
2151 __ LoadPU(r0, MemOperand(r3, kPointerSize));
2152 __ push(r0);
2153 __ bdnz(&loop);
2154 __ bind(&no_args);
2155 __ mr(r3, r5);
2156 }
2157
2158 // Dispatch to Call or Construct depending on whether new.target is undefined.
2159 {
2160 __ CompareRoot(r6, Heap::kUndefinedValueRootIndex);
2161 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
2162 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2163 }
2164}
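// Summary of the argumentsList handling above: unmodified sloppy or strict
// arguments objects whose length matches their elements array are used
// directly, JSArrays with FAST_SMI_ELEMENTS or FAST_ELEMENTS kinds are
// unpacked in place, and every other object is handed to
// Runtime::kCreateListFromArrayLike.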
2165
Ben Murdoch097c5b22016-05-18 11:27:45 +01002166namespace {
2167
2168// Drops top JavaScript frame and an arguments adaptor frame below it (if
2169// present) preserving all the arguments prepared for current call.
2170// Does nothing if debugger is currently active.
2171// ES6 14.6.3. PrepareForTailCall
2172//
2173// Stack structure for the function g() tail calling f():
2174//
2175// ------- Caller frame: -------
2176// | ...
2177// | g()'s arg M
2178// | ...
2179// | g()'s arg 1
2180// | g()'s receiver arg
2181// | g()'s caller pc
2182// ------- g()'s frame: -------
2183// | g()'s caller fp <- fp
2184// | g()'s context
2185// | function pointer: g
2186// | -------------------------
2187// | ...
2188// | ...
2189// | f()'s arg N
2190// | ...
2191// | f()'s arg 1
2192// | f()'s receiver arg <- sp (f()'s caller pc is not on the stack yet!)
2193// ----------------------
2194//
2195void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
2196 Register scratch1, Register scratch2,
2197 Register scratch3) {
2198 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
2199 Comment cmnt(masm, "[ PrepareForTailCall");
2200
Ben Murdochda12d292016-06-02 14:46:10 +01002201 // Prepare for tail call only if ES2015 tail call elimination is enabled.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002202 Label done;
Ben Murdochda12d292016-06-02 14:46:10 +01002203 ExternalReference is_tail_call_elimination_enabled =
2204 ExternalReference::is_tail_call_elimination_enabled_address(
2205 masm->isolate());
2206 __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002207 __ lbz(scratch1, MemOperand(scratch1));
2208 __ cmpi(scratch1, Operand::Zero());
Ben Murdochda12d292016-06-02 14:46:10 +01002209 __ beq(&done);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002210
2211 // Drop possible interpreter handler/stub frame.
2212 {
2213 Label no_interpreter_frame;
Ben Murdochda12d292016-06-02 14:46:10 +01002214 __ LoadP(scratch3,
2215 MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002216 __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::STUB), r0);
2217 __ bne(&no_interpreter_frame);
2218 __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2219 __ bind(&no_interpreter_frame);
2220 }
2221
2222 // Check if next frame is an arguments adaptor frame.
Ben Murdochda12d292016-06-02 14:46:10 +01002223 Register caller_args_count_reg = scratch1;
Ben Murdoch097c5b22016-05-18 11:27:45 +01002224 Label no_arguments_adaptor, formal_parameter_count_loaded;
2225 __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01002226 __ LoadP(
2227 scratch3,
2228 MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002229 __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
2230 __ bne(&no_arguments_adaptor);
2231
Ben Murdochda12d292016-06-02 14:46:10 +01002232 // Drop current frame and load arguments count from arguments adaptor frame.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002233 __ mr(fp, scratch2);
Ben Murdochda12d292016-06-02 14:46:10 +01002234 __ LoadP(caller_args_count_reg,
Ben Murdoch097c5b22016-05-18 11:27:45 +01002235 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01002236 __ SmiUntag(caller_args_count_reg);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002237 __ b(&formal_parameter_count_loaded);
2238
2239 __ bind(&no_arguments_adaptor);
2240 // Load caller's formal parameter count
Ben Murdochda12d292016-06-02 14:46:10 +01002241 __ LoadP(scratch1,
2242 MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002243 __ LoadP(scratch1,
2244 FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
2245 __ LoadWordArith(
Ben Murdochda12d292016-06-02 14:46:10 +01002246 caller_args_count_reg,
2247 FieldMemOperand(scratch1,
2248 SharedFunctionInfo::kFormalParameterCountOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002249#if !V8_TARGET_ARCH_PPC64
Ben Murdochda12d292016-06-02 14:46:10 +01002250 __ SmiUntag(caller_args_count_reg);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002251#endif
2252
2253 __ bind(&formal_parameter_count_loaded);
2254
Ben Murdochda12d292016-06-02 14:46:10 +01002255 ParameterCount callee_args_count(args_reg);
2256 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
2257 scratch3);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002258 __ bind(&done);
2259}
2260} // namespace
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002261
2262// static
2263void Builtins::Generate_CallFunction(MacroAssembler* masm,
Ben Murdoch097c5b22016-05-18 11:27:45 +01002264 ConvertReceiverMode mode,
2265 TailCallMode tail_call_mode) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002266 // ----------- S t a t e -------------
2267 // -- r3 : the number of arguments (not including the receiver)
2268 // -- r4 : the function to call (checked to be a JSFunction)
2269 // -----------------------------------
2270 __ AssertFunction(r4);
2271
2272 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2273 // Check that the function is not a "classConstructor".
2274 Label class_constructor;
2275 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2276 __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
2277 __ TestBitMask(r6, SharedFunctionInfo::kClassConstructorBits, r0);
2278 __ bne(&class_constructor, cr0);
2279
2280 // Enter the context of the function; ToObject has to run in the function
2281 // context, and we also need to take the global proxy from the function
2282 // context in case of conversion.
2283 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
2284 // We need to convert the receiver for non-native sloppy mode functions.
2285 Label done_convert;
2286 __ andi(r0, r6, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
2287 (1 << SharedFunctionInfo::kNativeBit)));
2288 __ bne(&done_convert, cr0);
2289 {
2290 // ----------- S t a t e -------------
2291 // -- r3 : the number of arguments (not including the receiver)
2292 // -- r4 : the function to call (checked to be a JSFunction)
2293 // -- r5 : the shared function info.
2294 // -- cp : the function context.
2295 // -----------------------------------
2296
2297 if (mode == ConvertReceiverMode::kNullOrUndefined) {
2298 // Patch receiver to global proxy.
2299 __ LoadGlobalProxy(r6);
2300 } else {
2301 Label convert_to_object, convert_receiver;
2302 __ ShiftLeftImm(r6, r3, Operand(kPointerSizeLog2));
2303 __ LoadPX(r6, MemOperand(sp, r6));
2304 __ JumpIfSmi(r6, &convert_to_object);
2305 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2306 __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE);
2307 __ bge(&done_convert);
2308 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2309 Label convert_global_proxy;
2310 __ JumpIfRoot(r6, Heap::kUndefinedValueRootIndex,
2311 &convert_global_proxy);
2312 __ JumpIfNotRoot(r6, Heap::kNullValueRootIndex, &convert_to_object);
2313 __ bind(&convert_global_proxy);
2314 {
2315 // Patch receiver to global proxy.
2316 __ LoadGlobalProxy(r6);
2317 }
2318 __ b(&convert_receiver);
2319 }
2320 __ bind(&convert_to_object);
2321 {
2322 // Convert receiver using ToObject.
2323 // TODO(bmeurer): Inline the allocation here to avoid building the frame
2324 // in the fast case? (fall back to AllocateInNewSpace?)
2325 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2326 __ SmiTag(r3);
2327 __ Push(r3, r4);
2328 __ mr(r3, r6);
2329 ToObjectStub stub(masm->isolate());
2330 __ CallStub(&stub);
2331 __ mr(r6, r3);
2332 __ Pop(r3, r4);
2333 __ SmiUntag(r3);
2334 }
2335 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2336 __ bind(&convert_receiver);
2337 }
2338 __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
2339 __ StorePX(r6, MemOperand(sp, r7));
2340 }
2341 __ bind(&done_convert);
2342
2343 // ----------- S t a t e -------------
2344 // -- r3 : the number of arguments (not including the receiver)
2345 // -- r4 : the function to call (checked to be a JSFunction)
2346 // -- r5 : the shared function info.
2347 // -- cp : the function context.
2348 // -----------------------------------
2349
Ben Murdoch097c5b22016-05-18 11:27:45 +01002350 if (tail_call_mode == TailCallMode::kAllow) {
2351 PrepareForTailCall(masm, r3, r6, r7, r8);
2352 }
2353
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002354 __ LoadWordArith(
2355 r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
2356#if !V8_TARGET_ARCH_PPC64
2357 __ SmiUntag(r5);
2358#endif
2359 ParameterCount actual(r3);
2360 ParameterCount expected(r5);
2361 __ InvokeFunctionCode(r4, no_reg, expected, actual, JUMP_FUNCTION,
2362 CheckDebugStepCallWrapper());
2363
2364 // The function is a "classConstructor", need to raise an exception.
2365 __ bind(&class_constructor);
2366 {
2367 FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
2368 __ push(r4);
2369 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2370 }
2371}
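// Note on the conversion above (ES6 9.2.1): only non-native sloppy-mode
// functions convert the receiver; null or undefined become the global
// proxy, other primitives go through the ToObject stub, and strict-mode or
// native functions receive the value unchanged.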
2372
2373
2374namespace {
2375
2376void Generate_PushBoundArguments(MacroAssembler* masm) {
2377 // ----------- S t a t e -------------
2378 // -- r3 : the number of arguments (not including the receiver)
2379 // -- r4 : target (checked to be a JSBoundFunction)
2380 // -- r6 : new.target (only in case of [[Construct]])
2381 // -----------------------------------
2382
2383 // Load [[BoundArguments]] into r5 and length of that into r7.
2384 Label no_bound_arguments;
2385 __ LoadP(r5, FieldMemOperand(r4, JSBoundFunction::kBoundArgumentsOffset));
2386 __ LoadP(r7, FieldMemOperand(r5, FixedArray::kLengthOffset));
2387 __ SmiUntag(r7, SetRC);
2388 __ beq(&no_bound_arguments, cr0);
2389 {
2390 // ----------- S t a t e -------------
2391 // -- r3 : the number of arguments (not including the receiver)
2392 // -- r4 : target (checked to be a JSBoundFunction)
2393 // -- r5 : the [[BoundArguments]] (implemented as FixedArray)
2394 // -- r6 : new.target (only in case of [[Construct]])
2395 // -- r7 : the number of [[BoundArguments]]
2396 // -----------------------------------
2397
2398 // Reserve stack space for the [[BoundArguments]].
2399 {
2400 Label done;
2401 __ mr(r9, sp); // preserve previous stack pointer
2402 __ ShiftLeftImm(r10, r7, Operand(kPointerSizeLog2));
2403 __ sub(sp, sp, r10);
2404 // Check the stack for overflow. We are not trying to catch interruptions
2405 // (i.e. debug break and preemption) here, so check the "real stack
2406 // limit".
2407 __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
2408 __ bgt(&done); // Signed comparison.
2409 // Restore the stack pointer.
2410 __ mr(sp, r9);
2411 {
2412 FrameScope scope(masm, StackFrame::MANUAL);
2413 __ EnterFrame(StackFrame::INTERNAL);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002414 __ CallRuntime(Runtime::kThrowStackOverflow);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002415 }
2416 __ bind(&done);
2417 }
2418
2419 // Relocate arguments down the stack.
2420 // -- r3 : the number of arguments (not including the receiver)
2421 // -- r9 : the previous stack pointer
2422 // -- r10: the size of the [[BoundArguments]]
2423 {
2424 Label skip, loop;
2425 __ li(r8, Operand::Zero());
2426 __ cmpi(r3, Operand::Zero());
2427 __ beq(&skip);
2428 __ mtctr(r3);
2429 __ bind(&loop);
2430 __ LoadPX(r0, MemOperand(r9, r8));
2431 __ StorePX(r0, MemOperand(sp, r8));
2432 __ addi(r8, r8, Operand(kPointerSize));
2433 __ bdnz(&loop);
2434 __ bind(&skip);
2435 }
2436
2437 // Copy [[BoundArguments]] to the stack (below the arguments).
2438 {
2439 Label loop;
2440 __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2441 __ add(r5, r5, r10);
2442 __ mtctr(r7);
2443 __ bind(&loop);
2444 __ LoadPU(r0, MemOperand(r5, -kPointerSize));
2445 __ StorePX(r0, MemOperand(sp, r8));
2446 __ addi(r8, r8, Operand(kPointerSize));
2447 __ bdnz(&loop);
2448 __ add(r3, r3, r7);
2449 }
2450 }
2451 __ bind(&no_bound_arguments);
2452}
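// Resulting stack layout (a sketch, with n explicit and k bound arguments,
// slots measured in pointers):
//
//   sp[0]   .. sp[n-1]    the relocated explicit arguments
//   sp[n]   .. sp[n+k-1]  the [[BoundArguments]], last entry lowest
//   sp[n+k]               the receiver
//
// and r3 is increased from n to n + k.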
2453
2454} // namespace
2455
2456
2457// static
Ben Murdoch097c5b22016-05-18 11:27:45 +01002458void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2459 TailCallMode tail_call_mode) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002460 // ----------- S t a t e -------------
2461 // -- r3 : the number of arguments (not including the receiver)
2462 // -- r4 : the function to call (checked to be a JSBoundFunction)
2463 // -----------------------------------
2464 __ AssertBoundFunction(r4);
2465
Ben Murdoch097c5b22016-05-18 11:27:45 +01002466 if (tail_call_mode == TailCallMode::kAllow) {
2467 PrepareForTailCall(masm, r3, r6, r7, r8);
2468 }
2469
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002470 // Patch the receiver to [[BoundThis]].
2471 __ LoadP(ip, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset));
2472 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
2473 __ StorePX(ip, MemOperand(sp, r0));
2474
2475 // Push the [[BoundArguments]] onto the stack.
2476 Generate_PushBoundArguments(masm);
2477
2478 // Call the [[BoundTargetFunction]] via the Call builtin.
2479 __ LoadP(r4,
2480 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
2481 __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
2482 masm->isolate())));
2483 __ LoadP(ip, MemOperand(ip));
2484 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
2485 __ JumpToJSEntry(ip);
2486}
2487
2488
2489// static
Ben Murdoch097c5b22016-05-18 11:27:45 +01002490void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2491 TailCallMode tail_call_mode) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002492 // ----------- S t a t e -------------
2493 // -- r3 : the number of arguments (not including the receiver)
2494 // -- r4 : the target to call (can be any Object).
2495 // -----------------------------------
2496
2497 Label non_callable, non_function, non_smi;
2498 __ JumpIfSmi(r4, &non_callable);
2499 __ bind(&non_smi);
2500 __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002501 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002502 RelocInfo::CODE_TARGET, eq);
2503 __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002504 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002505 RelocInfo::CODE_TARGET, eq);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002506
2507 // Check if target has a [[Call]] internal method.
2508 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
2509 __ TestBit(r7, Map::kIsCallable, r0);
2510 __ beq(&non_callable, cr0);
2511
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002512 __ cmpi(r8, Operand(JS_PROXY_TYPE));
2513 __ bne(&non_function);
2514
Ben Murdoch097c5b22016-05-18 11:27:45 +01002515 // 0. Prepare for tail call if necessary.
2516 if (tail_call_mode == TailCallMode::kAllow) {
2517 PrepareForTailCall(masm, r3, r6, r7, r8);
2518 }
2519
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002520 // 1. Runtime fallback for Proxy [[Call]].
2521 __ Push(r4);
2522 // Increase the arguments size to include the pushed function and the
2523 // existing receiver on the stack.
2524 __ addi(r3, r3, Operand(2));
2525 // Tail-call to the runtime.
2526 __ JumpToExternalReference(
2527 ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2528
2529 // 2. Call to something else, which might have a [[Call]] internal method (if
2530 // not we raise an exception).
2531 __ bind(&non_function);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002532 // Overwrite the original receiver with the (original) target.
2533 __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
2534 __ StorePX(r4, MemOperand(sp, r8));
2535 // Let the "call_as_function_delegate" take care of the rest.
2536 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r4);
2537 __ Jump(masm->isolate()->builtins()->CallFunction(
Ben Murdoch097c5b22016-05-18 11:27:45 +01002538 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002539 RelocInfo::CODE_TARGET);
2540
2541 // 3. Call to something that is not callable.
2542 __ bind(&non_callable);
2543 {
2544 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2545 __ Push(r4);
2546 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2547 }
2548}
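// Dispatch order above: JSFunction -> CallFunction, JSBoundFunction ->
// CallBoundFunction, JSProxy -> Runtime::kJSProxyCall, any other callable
// object -> the CALL_AS_FUNCTION_DELEGATE, and everything else throws a
// TypeError via Runtime::kThrowCalledNonCallable.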
2549
2550
2551// static
2552void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2553 // ----------- S t a t e -------------
2554 // -- r3 : the number of arguments (not including the receiver)
2555 // -- r4 : the constructor to call (checked to be a JSFunction)
2556 // -- r6 : the new target (checked to be a constructor)
2557 // -----------------------------------
2558 __ AssertFunction(r4);
2559
2560 // Calling convention for function specific ConstructStubs require
2561 // r5 to contain either an AllocationSite or undefined.
2562 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
2563
2564 // Tail call to the function-specific construct stub (still in the caller
2565 // context at this point).
2566 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2567 __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset));
2568 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
2569 __ JumpToJSEntry(ip);
2570}
2571
2572
2573// static
2574void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2575 // ----------- S t a t e -------------
2576 // -- r3 : the number of arguments (not including the receiver)
2577 // -- r4 : the function to call (checked to be a JSBoundFunction)
2578 // -- r6 : the new target (checked to be a constructor)
2579 // -----------------------------------
2580 __ AssertBoundFunction(r4);
2581
2582 // Push the [[BoundArguments]] onto the stack.
2583 Generate_PushBoundArguments(masm);
2584
2585 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2586 Label skip;
2587 __ cmp(r4, r6);
2588 __ bne(&skip);
2589 __ LoadP(r6,
2590 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
2591 __ bind(&skip);
2592
2593 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2594 __ LoadP(r4,
2595 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
2596 __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
2597 __ LoadP(ip, MemOperand(ip));
2598 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
2599 __ JumpToJSEntry(ip);
2600}
2601
2602
2603// static
2604void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
2605 // ----------- S t a t e -------------
2606 // -- r3 : the number of arguments (not including the receiver)
2607 // -- r4 : the constructor to call (checked to be a JSProxy)
2608 // -- r6 : the new target (either the same as the constructor or
2609 // the JSFunction on which new was invoked initially)
2610 // -----------------------------------
2611
2612 // Call into the Runtime for Proxy [[Construct]].
2613 __ Push(r4, r6);
2614 // Include the pushed new_target, constructor and the receiver.
2615 __ addi(r3, r3, Operand(3));
2616 // Tail-call to the runtime.
2617 __ JumpToExternalReference(
2618 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
2619}
2620
2621
2622// static
2623void Builtins::Generate_Construct(MacroAssembler* masm) {
2624 // ----------- S t a t e -------------
2625 // -- r3 : the number of arguments (not including the receiver)
2626 // -- r4 : the constructor to call (can be any Object)
2627 // -- r6 : the new target (either the same as the constructor or
2628 // the JSFunction on which new was invoked initially)
2629 // -----------------------------------
2630
2631 // Check if target is a Smi.
2632 Label non_constructor;
2633 __ JumpIfSmi(r4, &non_constructor);
2634
2635 // Dispatch based on instance type.
2636 __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
2637 __ Jump(masm->isolate()->builtins()->ConstructFunction(),
2638 RelocInfo::CODE_TARGET, eq);
2639
2640 // Check if target has a [[Construct]] internal method.
2641 __ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset));
2642 __ TestBit(r5, Map::kIsConstructor, r0);
2643 __ beq(&non_constructor, cr0);
2644
2645 // Only dispatch to bound functions after checking whether they are
2646 // constructors.
2647 __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
2648 __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
2649 RelocInfo::CODE_TARGET, eq);
2650
2651 // Only dispatch to proxies after checking whether they are constructors.
2652 __ cmpi(r8, Operand(JS_PROXY_TYPE));
2653 __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
2654 eq);
2655
2656 // Called Construct on an exotic Object with a [[Construct]] internal method.
2657 {
2658 // Overwrite the original receiver with the (original) target.
2659 __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
2660 __ StorePX(r4, MemOperand(sp, r8));
2661 // Let the "call_as_constructor_delegate" take care of the rest.
2662 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r4);
2663 __ Jump(masm->isolate()->builtins()->CallFunction(),
2664 RelocInfo::CODE_TARGET);
2665 }
2666
2667 // Called Construct on an Object that doesn't have a [[Construct]] internal
2668 // method.
2669 __ bind(&non_constructor);
2670 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
2671 RelocInfo::CODE_TARGET);
2672}
2673
Ben Murdochc5610432016-08-08 18:44:38 +01002674// static
2675void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2676 // ----------- S t a t e -------------
2677 // -- r4 : requested object size (untagged)
2678 // -- lr : return address
2679 // -----------------------------------
2680 __ SmiTag(r4);
2681 __ Push(r4);
2682 __ LoadSmiLiteral(cp, Smi::FromInt(0));
2683 __ TailCallRuntime(Runtime::kAllocateInNewSpace);
2684}
2685
2686// static
2687void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
2688 // ----------- S t a t e -------------
2689 // -- r4 : requested object size (untagged)
2690 // -- lr : return address
2691 // -----------------------------------
2692 __ SmiTag(r4);
2693 __ LoadSmiLiteral(r5, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
2694 __ Push(r4, r5);
2695 __ LoadSmiLiteral(cp, Smi::FromInt(0));
2696 __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
2697}
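// Both Allocate builtins above smi-tag the untagged size so it survives the
// runtime call, and load a zero smi into cp, signalling that no JavaScript
// context is in scope at this point.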

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  //  -- r6 : new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  __ cmp(r3, r5);
  __ blt(&too_few);
  __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ beq(&dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r3 and copy end address into r7.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);
    // Adjust for return address and receiver.
    __ addi(r3, r3, Operand(2 * kPointerSize));
    __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
    __ sub(r7, r3, r7);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // r7: copy end address
    // ip: code entry to call

    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r3, 0));
    __ push(r0);
    __ cmp(r3, r7);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }
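
  // Illustration (not part of the original source): on the path above the
  // copy walks from high addresses to low ones. It starts at the receiver
  // slot (fp + 2 * kPointerSize plus the actual-arguments offset) and pushes
  // expected + 1 values in total: the receiver plus the first `expected`
  // arguments. Surplus arguments remain behind in the caller's frame and
  // are simply not copied into the adaptor frame.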

  {  // Too few parameters: actual < expected
    __ bind(&too_few);

    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r3; the copy end address is fp.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
    __ push(r0);
    __ cmp(r3, fp);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
    __ sub(r7, fp, r7);
    // Adjust for frame.
    __ subi(r7, r7, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ cmp(sp, r7);
    __ bne(&fill);
  }
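
  // Illustration (not part of the original source): this path implements the
  // usual JavaScript under-application semantics. For `function f(a, b, c)`
  // called as `f(1)`, actual == 1 and expected == 3, so the adaptor copies
  // the receiver and the single argument, then pushes undefined twice so
  // that `b` and `c` read as undefined inside f.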

  // Call the entry point.
  __ bind(&invoke);
  __ mr(r3, r5);
  // r3 : expected number of arguments
  // r4 : function (passed through to callee)
  // r6 : new target (passed through to callee)
  __ CallJSEntry(ip);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ blr();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ JumpToJSEntry(ip);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}
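
// A condensed sketch (not part of the original source) of the trampoline
// above, in C++-like pseudocode:
//
//   if (expected == kDontAdaptArgumentsSentinel) jump code_entry;  // in ip
//   EnterArgumentsAdaptorFrame();
//   if (stack check fails) throw StackOverflow;
//   if (actual >= expected)
//     copy receiver + first `expected` arguments;
//   else
//     copy receiver + all `actual` arguments, then fill with undefined;
//   r3 = expected;
//   CallJSEntry(ip);  // The return address here is what the deoptimizer
//                     // looks up via SetArgumentsAdaptorDeoptPCOffset.
//   LeaveArgumentsAdaptorFrame();
//   return;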


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC