// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

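// Convenience: "__" expands to ACCESS_MASM(masm), so the code-generation
// calls below read like an assembly listing.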
#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments excluding receiver
  //  -- r4                 : target
  //  -- r6                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(r4);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // Insert extra arguments.
  int num_extra_args = 0;
  switch (extra_args) {
    case BuiltinExtraArguments::kTarget:
      __ Push(r4);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kNewTarget:
      __ Push(r6);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kTargetAndNewTarget:
      __ Push(r4, r6);
      num_extra_args += 2;
      break;
    case BuiltinExtraArguments::kNone:
      break;
  }

  // JumpToExternalReference expects r3 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addi(r3, r3, Operand(num_extra_args + 1));

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ mr(r6, r4);
  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments
  //  -- lr                 : return address
  //  -- sp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- sp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Condition const cond_done = (kind == MathMaxMinKind::kMin) ? lt : gt;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r4 and the double value in d1.
  __ LoadRoot(r4, root_index);
  __ lfd(d1, FieldMemOperand(r4, HeapNumber::kValueOffset));

  // Set up state for the loop:
  // r5: address of arg[0] + kPointerSize
  // r6: number of slots to drop at exit (arguments + receiver)
  __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
  __ add(r5, sp, r5);
  __ addi(r6, r3, Operand(1));

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ cmpl(r5, sp);
    __ ble(&done_loop);

    // Load the next parameter tagged value into r3.
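    // LoadPU pre-decrements r5, so each iteration moves one slot down the
    // stack, from the highest argument slot towards sp.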
    __ LoadPU(r3, MemOperand(r5, -kPointerSize));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumberStub if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r3, &convert_smi);
    __ LoadP(r7, FieldMemOperand(r3, HeapObject::kMapOffset));
    __ JumpIfRoot(r7, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumberStub to convert it.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r6);
      __ Push(r4, r5, r6);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Pop(r4, r5, r6);
      __ SmiUntag(r6);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
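        // Convert as if r4 were a Smi; when it is actually a HeapNumber, the
        // lfd below reloads the correct double value instead.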
        __ SmiToDouble(d1, r4);
        __ JumpIfSmi(r4, &done_restore);
        __ lfd(d1, FieldMemOperand(r4, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ lfd(d2, FieldMemOperand(r3, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r3);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left hand
    // side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ fcmpu(d1, d2);
    __ bunordered(&compare_nan);
    __ b(cond_done, &loop);
    __ b(CommuteCondition(cond_done), &compare_swap);

    // Left and right hand side are equal, check for -0 vs. +0.
    __ TestDoubleIsMinusZero(reg, r7, r8);
    __ bne(&loop);

    // Update accumulator. Result is on the right hand side.
    __ bind(&compare_swap);
    __ fmr(d1, d2);
    __ mr(r4, r3);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    // We still need to visit the rest of the arguments.
    __ bind(&compare_nan);
    __ LoadRoot(r4, Heap::kNanValueRootIndex);
    __ lfd(d1, FieldMemOperand(r4, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  __ mr(r3, r4);
  __ Drop(r6);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r3 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
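    // LoadPUX updates sp to point at the first argument's slot; the Drop(2)
    // below then removes that slot together with the receiver.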
    __ LoadPUX(r3, MemOperand(sp, r3));
    __ Drop(2);
  }

  // 2a. Convert the first argument to a number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  __ Ret(1);
}


// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- r6                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // 2. Load the first argument into r5 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
    __ LoadPUX(r5, MemOperand(sp, r5));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadSmiLiteral(r5, Smi::FromInt(0));
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r5 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r5, &done_convert);
    __ CompareObjectType(r5, r7, r7, HEAP_NUMBER_TYPE);
    __ beq(&done_convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r4, r6);
      __ mr(r3, r5);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mr(r5, r3);
      __ Pop(r4, r6);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(r4, r6);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create the new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r5);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r5);
  }
  __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0);
  __ Ret();
}


// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r3 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
    __ LoadPUX(r3, MemOperand(sp, r3));
    __ Drop(2);
  }

  // 2a. At least one argument, return r3 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r3, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(r3, r4, r4, FIRST_NONSTRING_TYPE);
    __ bgt(&to_string);
    __ beq(&symbol_descriptive_string);
    __ Ret();
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r3, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r3 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in r3 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(r3);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}


// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- r6                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // 2. Load the first argument into r5 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
    __ LoadPUX(r5, MemOperand(sp, r5));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r5, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r5 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r5, &convert);
    __ CompareObjectType(r5, r7, r7, FIRST_NONSTRING_TYPE);
    __ blt(&done_convert);
    __ bind(&convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(r4, r6);
      __ mr(r3, r5);
      __ CallStub(&stub);
      __ mr(r5, r3);
      __ Pop(r4, r6);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(r4, r6);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create the new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r5);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r5);
  }
  __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0);
  __ Ret();
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
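  // Compute the untagged entry address of the code object and jump to it.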
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (preserved for callee)
  //  -- r4 : target function (preserved for callee)
  //  -- r6 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(r3);
    __ Push(r3, r4, r6, r4);

    __ CallRuntime(function_id, 1);
    __ mr(r5, r3);

    // Restore target function and new target.
    __ Pop(r3, r4, r6);
    __ SmiUntag(r3);
  }
  __ addi(ip, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // the stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmpl(sp, ip);
  __ bge(&ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- r4     : constructor function
  //  -- r5     : allocation site or undefined
  //  -- r6     : new target
  //  -- cp     : context
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r5, r7);

    if (!create_implicit_receiver) {
      __ SmiTag(r7, r3, SetRC);
      __ Push(cp, r5, r7);
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    } else {
      __ SmiTag(r3);
      __ Push(cp, r5, r3);

      // Allocate the new receiver object.
      __ Push(r4, r6);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mr(r7, r3);
      __ Pop(r4, r6);

      // ----------- S t a t e -------------
      //  -- r4: constructor function
      //  -- r6: new target
      //  -- r7: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ LoadP(r3, MemOperand(sp));
      __ SmiUntag(r3, SetRC);

      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(r7, r7);
    }

    // Set up pointer to last argument.
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r3: number of arguments
    // r4: constructor function
    // r5: address of last argument (caller sp)
    // r6: new target
    // cr0: condition indicating whether r3 is zero
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ beq(&no_args, cr0);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ sub(sp, sp, ip);
    __ mtctr(r3);
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r5, ip));
    __ StorePX(r0, MemOperand(sp, ip));
    __ bdnz(&loop);
    __ bind(&no_args);

    // Call the function.
    // r3: number of arguments
    // r4: constructor function
    // r6: new target
    if (is_api_function) {
      __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
      Handle<Code> code = masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(r3);
      __ InvokeFunction(r4, r6, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r3: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r3: result
      // sp[0]: receiver
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(r3, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r3, r4, r6, FIRST_JS_RECEIVER_TYPE);
      __ bge(&exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ LoadP(r3, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r3: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ LoadP(r4, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r3, &dont_throw);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

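  // Drop the caller arguments (r4 holds the smi-tagged argument count) and
  // the receiver from the stack.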
  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5);
  }
  __ blr();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}


void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ push(r4);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}


enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers r5; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiToPtrArrayOffset(r0, argc);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2));
  }
  __ cmp(r5, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r3: new.target
  // r4: function
  // r5: receiver
  // r6: argc
  // r7: argv
  // r0, r8-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ LoadP(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r4, r5);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r5.
    Generate_CheckStackOverflow(masm, r6, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // r4: function
    // r6: argc
    // r7: argv, i.e. points to first arg
    Label loop, entry;
    __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
    __ add(r5, r7, r0);
    // r5 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ LoadP(r8, MemOperand(r7));  // read next parameter
    __ addi(r7, r7, Operand(kPointerSize));
    __ LoadP(r0, MemOperand(r8));  // dereference handle
    __ push(r0);                   // push parameter
    __ bind(&entry);
    __ cmp(r7, r5);
    __ bne(&loop);

    // Set up new.target and argc.
    __ mr(r7, r3);
    __ mr(r3, r6);
    __ mr(r6, r7);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
    __ mr(r14, r7);
    __ mr(r15, r7);
    __ mr(r16, r7);
    __ mr(r17, r7);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ blr();

  // r3: result
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r4: the JS function object being called.
//   o r6: the new target
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
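  // The MANUAL scope above emitted no prologue, so build the standard frame
  // here, recording r4 (the closure) in its standard stack slot.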
  __ PushStandardFrame(r4);

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ LoadP(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  Label array_done;
  Register debug_info = r5;
  DCHECK(!debug_info.is(r3));
  __ LoadP(debug_info,
           FieldMemOperand(r3, SharedFunctionInfo::kDebugInfoOffset));
  // Load original bytecode array or the debug copy.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpSmiLiteral(debug_info, DebugInfo::uninitialized(), r0);
  __ beq(&array_done);
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ bind(&array_done);

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Push new.target, bytecode array and zero for bytecode array offset.
  __ li(r3, Operand::Zero());
  __ Push(r6, kInterpreterBytecodeArrayRegister, r3);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ lwz(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r6, sp, r5);
    __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
    __ cmpl(r6, r0);
    __ bge(&ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop, no_args;
    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2), SetRC);
    __ beq(&no_args, cr0);
    __ mtctr(r5);
    __ bind(&loop);
    __ push(r6);
    __ bdnz(&loop);
    __ bind(&no_args);
  }

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Code aging of the BytecodeArray object.

  // Load accumulator, register file, bytecode offset, dispatch table into
  // registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ addi(kInterpreterRegisterFileRegister, fp,
          Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
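  // r4 now holds the current bytecode; scale it to index the pointer-sized
  // entries of the dispatch table.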
  __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  // TODO(rmcilroy): Make dispatch table point to code entries to avoid
  // untagging and header removal.
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(ip);

  // Even though the first bytecode handler was called, we will never return.
  __ Abort(kUnexpectedReturnFromBytecodeHandler);
}


void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in the accumulator, which is already in r3.

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments and return.
  __ lwz(r0, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                             BytecodeArray::kParameterSizeOffset));
  __ add(sp, sp, r0);
  __ blr();
}


static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
                                         Register count, Register scratch) {
  Label loop;
  __ addi(index, index, Operand(kPointerSize));  // Bias up for LoadPU.
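  // The bias cancels LoadPU's pre-decrement, so the first iteration reads the
  // original index address and each pass pushes the next lower argument.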
  __ mtctr(count);
  __ bind(&loop);
  __ LoadPU(scratch, MemOperand(index, -kPointerSize));
  __ push(scratch);
  __ bdnz(&loop);
}


// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r5 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r4 : the target to call (can be any Object).
  // -----------------------------------

  // Calculate number of arguments (add one for receiver).
  __ addi(r6, r3, Operand(1));

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r5, r6, r7);

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}


// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (not including receiver)
  //  -- r6 : new target
  //  -- r4 : constructor to call
  //  -- r5 : address of the first argument
  // -----------------------------------

  // Push a slot for the receiver to be constructed.
  __ li(r0, Operand::Zero());
  __ push(r0);

  // Push the arguments (skip if none).
  Label skip;
  __ cmpi(r3, Operand::Zero());
  __ beq(&skip);
  Generate_InterpreterPushArgs(masm, r5, r3, r7);
  __ bind(&skip);

  // Call the constructor with r3, r4, and r6 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}


static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) {
  // Initialize the register file register and the dispatch table register.
  __ addi(kInterpreterRegisterFileRegister, fp,
          Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the context from the frame.
  __ LoadP(kContextRegister,
           MemOperand(kInterpreterRegisterFileRegister,
                      InterpreterFrameConstants::kContextFromRegisterPointer));

  // Get the bytecode array pointer from the frame.
  __ LoadP(
      kInterpreterBytecodeArrayRegister,
      MemOperand(kInterpreterRegisterFileRegister,
                 InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r4, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(
               kInterpreterRegisterFileRegister,
               InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(ip);
}


static void Generate_InterpreterNotifyDeoptimizedHelper(
    MacroAssembler* masm, Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ LoadSmiLiteral(r4, Smi::FromInt(static_cast<int>(type)));
    __ Push(r4);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // Drop state (we don't use these for interpreter deopts) and pop the
  // accumulator value into the accumulator register.
  __ Drop(1);
  __ Pop(kInterpreterAccumulatorRegister);

  // Enter the bytecode dispatch.
  Generate_EnterBytecodeDispatch(masm);
}


void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the address of the interpreter entry trampoline as a return address.
  // This simulates the initial call to bytecode handlers in the interpreter
  // entry trampoline. The return will never actually be taken, but our stack
  // walker uses this address to determine whether a frame is interpreted.
  __ mov(r0,
         Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));
  __ mtlr(r0);

  Generate_EnterBytecodeDispatch(masm);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   r6 - new target
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
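  // MultiPush/MultiPop take a bit mask of registers; reg.bit() contributes
  // the mask bit for each register to be saved and restored.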
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);
  __ Jump(ip);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   r6 - new target
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);

  // Perform prologue operations usually performed by the young code stub.
  __ PushStandardFrame(r4);

  // Jump to point after the code-age stub.
  __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength));
  __ Jump(r3);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ addi(sp, sp, Operand(kPointerSize));  // Ignore state
  __ blr();                                // Jump to miss handler
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
    __ push(r3);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> r9.
  __ LoadP(r9, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r9);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmpi(r9, Operand(FullCodeGenerator::NO_REGISTERS));
  __ bne(&with_tos_register);
  __ addi(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
  __ cmpi(r9, Operand(FullCodeGenerator::TOS_REG));
  __ bne(&unknown_state);
  __ addi(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


// Clobbers registers {r7, r8, r9, r10}.
void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                             Register function_template_info,
                             Label* receiver_check_failed) {
  Register signature = r7;
  Register map = r8;
  Register constructor = r9;
  Register scratch = r10;

  // If there is no signature, return the holder.
  __ LoadP(signature, FieldMemOperand(function_template_info,
                                      FunctionTemplateInfo::kSignatureOffset));
  Label receiver_check_passed;
  __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
                &receiver_check_passed);

  // Walk the prototype chain.
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, scratch, scratch);
  __ cmpi(scratch, Operand(JS_FUNCTION_TYPE));
  Label next_prototype;
  __ bne(&next_prototype);
  Register type = constructor;
  __ LoadP(type,
           FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(type,
           FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ cmp(signature, type);
  __ beq(&receiver_check_passed);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE);
  __ bne(&next_prototype);

  // Otherwise load the parent function template and iterate.
  __ LoadP(type,
           FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ b(&function_template_loop);

  // Load the next prototype.
  __ bind(&next_prototype);
  __ lwz(scratch, FieldMemOperand(map, Map::kBitField3Offset));
  __ DecodeField<Map::HasHiddenPrototype>(scratch, SetRC);
  __ beq(receiver_check_failed, cr0);

  __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ b(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}


void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments excluding receiver
  //  -- r4                 : callee
  //  -- lr                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
1386 __ ShiftLeftImm(r11, r3, Operand(kPointerSizeLog2));
1387 __ LoadPX(r5, MemOperand(sp, r11));
1388 CompatibleReceiverCheck(masm, r5, r6, &receiver_check_failed);
1389
1390 // Get the callback offset from the FunctionTemplateInfo, and jump to the
1391 // beginning of the code.
1392 __ LoadP(r7, FieldMemOperand(r6, FunctionTemplateInfo::kCallCodeOffset));
1393 __ LoadP(r7, FieldMemOperand(r7, CallHandlerInfo::kFastHandlerOffset));
1394 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
1395 __ JumpToJSEntry(ip);
1396
1397 // Compatible receiver check failed: throw an Illegal Invocation exception.
1398 __ bind(&receiver_check_failed);
1399 // Drop the arguments (including the receiver);
1400 __ addi(r11, r11, Operand(kPointerSize));
1401 __ add(sp, sp, r11);
1402 __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
1403}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r3);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
  __ bne(&skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));

  {
    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start

    if (FLAG_enable_embedded_constant_pool) {
      __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ LoadP(r4, FieldMemOperand(
                     r4, FixedArray::OffsetOfElementAt(
                             DeoptimizationInputData::kOsrPcOffsetIndex)));
    __ SmiUntag(r4);

    // Compute the target address = code start + osr_offset
    __ add(r0, r3, r4);

    // And "return" to the OSR entry point of the function.
    __ mtlr(r0);
    __ blr();
  }
}
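
// In short (illustrative summary): once the runtime hands back an optimized
// Code object, the resume address is computed as
//
//   target = code_start + deopt_data[kOsrPcOffsetIndex]
//
// and control "returns" there through the link register, so execution
// continues at the OSR entry point inside the optimized code.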


// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : receiver
  // -----------------------------------

  // 1. Pop receiver into r3 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(r3);
    __ JumpIfSmi(r3, &receiver_not_date);
    __ CompareObjectType(r3, r4, r5, JS_DATE_TYPE);
    __ bne(&receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as
  //    necessary.
  if (field_index == JSDate::kDateValue) {
    __ LoadP(r3, FieldMemOperand(r3, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      Label stamp_mismatch;
      __ mov(r4, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
      __ LoadP(r4, MemOperand(r4));
      __ LoadP(ip, FieldMemOperand(r3, JSDate::kCacheStampOffset));
      __ cmp(r4, ip);
      __ bne(&stamp_mismatch);
      __ LoadP(r3, FieldMemOperand(
                       r3, JSDate::kValueOffset + field_index * kPointerSize));
      __ Ret();
      __ bind(&stamp_mismatch);
    }
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, r4);
    __ LoadSmiLiteral(r4, Smi::FromInt(field_index));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  __ TailCallRuntime(Runtime::kThrowNotDateError);
}
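
// The cached-field fast path above is, roughly (illustrative pseudo-JS):
//
//   if (date.cache_stamp === isolate.date_cache_stamp) {
//     return date.cached_fields[field_index];   // cache is still valid
//   }
//   return %GetDateField(date, field_index);    // recompute in C++
//
// JSDate caches derived fields (year, month, day, ...) next to the raw time
// value; the isolate-wide stamp invalidates those caches when the date cache
// is reset.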

// static
void Builtins::Generate_FunctionHasInstance(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3    : argc
  //  -- sp[0] : first argument (left-hand side)
  //  -- sp[4] : receiver (right-hand side)
  // -----------------------------------

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ LoadP(InstanceOfDescriptor::LeftRegister(),
             MemOperand(fp, 2 * kPointerSize));  // Load left-hand side.
    __ LoadP(InstanceOfDescriptor::RightRegister(),
             MemOperand(fp, 3 * kPointerSize));  // Load right-hand side.
    InstanceOfStub stub(masm->isolate(), true);
    __ CallStub(&stub);
  }

  // Pop the argument and the receiver.
  __ Ret(2);
}
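
// This implements Function.prototype[Symbol.hasInstance]. Illustratively:
//
//   function hasInstance(V) {
//     // OrdinaryHasInstance(C, V): walk V's prototype chain looking for
//     // C.prototype (the InstanceOfStub performs that walk).
//     return OrdinaryHasInstance(this, V);
//   }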

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into r4, argArray into r3 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ mr(scratch, r3);
    __ LoadP(r4, MemOperand(new_sp, 0));  // receiver
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize));  // thisArg
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize));  // argArray
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  //  -- r3    : argArray
  //  -- r4    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(r4, &receiver_not_callable);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&receiver_not_callable, cr0);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(r3, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ li(r3, Operand::Zero());
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
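
// A rough JS equivalent of the builtin above (illustrative only):
//
//   Function.prototype.apply = function (thisArg, argArray) {
//     if (!IsCallable(this)) throw new TypeError(...);
//     if (argArray === null || argArray === undefined) {
//       return Call(this, thisArg);                 // case 4b
//     }
//     return Call(this, thisArg, ...argArray);      // case 4a, via Apply
//   };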


// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r3: actual number of arguments
  {
    Label done;
    __ cmpi(r3, Operand::Zero());
    __ bne(&done);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ addi(r3, r3, Operand(1));
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  // r3: actual number of arguments
  __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
  __ LoadPX(r4, MemOperand(sp, r5));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // r3: actual number of arguments
  // r4: callable
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r5, sp, r5);

    __ mtctr(r3);
    __ bind(&loop);
    __ LoadP(ip, MemOperand(r5, -kPointerSize));
    __ StoreP(ip, MemOperand(r5));
    __ subi(r5, r5, Operand(kPointerSize));
    __ bdnz(&loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ subi(r3, r3, Operand(1));
    __ pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
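
// Illustratively, f.call(a, b, c) ends up invoking f with receiver a and
// arguments (b, c): the builtin shifts every stack slot down by one so the
// original first argument becomes the receiver, then dispatches to Call.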


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r4 (if present), argumentsList into r3 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mr(scratch, r4);
    __ mr(r3, r4);
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ beq(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize));  // thisArgument
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 3 * -kPointerSize));  // argumentsList
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r4    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(r4, &target_not_callable);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&target_not_callable, cr0);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
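
// Illustrative JS sketch of the builtin above:
//
//   Reflect.apply = function (target, thisArgument, argumentsList) {
//     if (!IsCallable(target)) throw new TypeError(...);
//     return Call(target, thisArgument,
//                 ...CreateListFromArrayLike(argumentsList));
//   };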


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r4 (if present), argumentsList into r3 (if present),
  // new.target into r6 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mr(r3, r4);
    __ mr(r6, r4);
    __ StoreP(r4, MemOperand(new_sp, 0));  // receiver (undefined)
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ mr(r6, r4);  // new.target defaults to target
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize));  // argumentsList
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r6, MemOperand(new_sp, 3 * -kPointerSize));  // new.target
    __ bind(&skip);
    __ mr(sp, new_sp);
  }

  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r6    : new.target
  //  -- r4    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(r4, &target_not_constructor);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsConstructor, r0);
  __ beq(&target_not_constructor, cr0);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(r6, &new_target_not_constructor);
  __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsConstructor, r0);
  __ beq(&new_target_not_constructor, cr0);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ StoreP(r6, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
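
// Illustrative JS sketch of the builtin above:
//
//   Reflect.construct = function (target, argumentsList, newTarget = target) {
//     if (!IsConstructor(target)) throw new TypeError(...);
//     if (!IsConstructor(newTarget)) throw new TypeError(...);
//     // [[Construct]] with new.target = newTarget:
//     return Construct(target, CreateListFromArrayLike(argumentsList),
//                      newTarget);
//   };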


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  //  -- r6 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
  // Make r8 the space we have left. The stack might already be overflowed
  // here which will cause r8 to become negative.
  __ sub(r8, sp, r8);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
  __ cmp(r8, r0);
  __ ble(stack_overflow);  // Signed comparison.
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r3);
  __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ mflr(r0);
  __ push(r0);
  if (FLAG_enable_embedded_constant_pool) {
    __ Push(fp, kConstantPoolRegister, r7, r4, r3);
  } else {
    __ Push(fp, r7, r4, r3);
  }
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                          kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                                kPointerSize)));
  int stack_adjustment = kPointerSize;  // adjust for receiver
  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
  __ SmiToPtrArrayOffset(r0, r4);
  __ add(sp, sp, r0);
}
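
// Taken together (illustrative summary): EnterArgumentsAdaptorFrame saves lr
// and fp (plus the constant pool pointer when embedded constant pools are
// enabled), pushes the ARGUMENTS_ADAPTOR frame marker, the function and the
// smi-tagged actual argument count, then points fp into the new frame.
// LeaveArgumentsAdaptorFrame reads that count back out of the frame, tears
// the frame down, and drops the actual arguments plus the receiver.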


// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r4    : target
  //  -- r6    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(r3, &create_runtime);

    // Load the map of argumentsList into r5.
    __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));

    // Load native context into r7.
    __ LoadP(r7, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ LoadP(ip, ContextMemOperand(r7, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r5);
    __ beq(&create_arguments);
    __ LoadP(ip, ContextMemOperand(r7, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r5);
    __ beq(&create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(r5, ip, JS_ARRAY_TYPE);
    __ beq(&create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r4, r6, r3);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(r4, r6);
      __ LoadP(r5, FieldMemOperand(r3, FixedArray::kLengthOffset));
      __ SmiUntag(r5);
    }
    __ b(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ LoadP(r5, FieldMemOperand(r3, JSArgumentsObject::kLengthOffset));
    __ LoadP(r7, FieldMemOperand(r3, JSObject::kElementsOffset));
    __ LoadP(ip, FieldMemOperand(r7, FixedArray::kLengthOffset));
    __ cmp(r5, ip);
    __ bne(&create_runtime);
    __ SmiUntag(r5);
    __ mr(r3, r7);
    __ b(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ lbz(r5, FieldMemOperand(r5, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(r5);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ cmpi(r5, Operand(FAST_ELEMENTS));
    __ bgt(&create_runtime);
    __ cmpi(r5, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ beq(&create_runtime);
    __ LoadP(r5, FieldMemOperand(r3, JSArray::kLengthOffset));
    __ LoadP(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
    __ SmiUntag(r5);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
    // Make ip the space we have left. The stack might already be overflowed
    // here which will cause ip to become negative.
    __ sub(ip, sp, ip);
    // Check if the arguments will overflow the stack.
    __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
    __ cmp(ip, r0);  // Signed comparison.
    __ bgt(&done);
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- r4    : target
  //  -- r3    : args (a FixedArray built from argumentsList)
  //  -- r5    : len (number of elements to push from args)
  //  -- r6    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label loop, no_args;
    __ cmpi(r5, Operand::Zero());
    __ beq(&no_args);
    __ addi(r3, r3,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    __ mtctr(r5);
    __ bind(&loop);
    __ LoadPU(r0, MemOperand(r3, kPointerSize));
    __ push(r0);
    __ bdnz(&loop);
    __ bind(&no_args);
    __ mr(r3, r5);
  }

  // Dispatch to Call or Construct depending on whether new.target is
  // undefined.
  {
    __ CompareRoot(r6, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
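
// Creation of the flat argument list above dispatches on the shape of
// argumentsList (illustrative sketch):
//
//   unmodified arguments object (sloppy or strict) -> reuse its elements
//   fast JSArray with packed smi/object elements   -> reuse its elements
//   anything else                                  -> %CreateListFromArrayLike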

namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
  __ lbz(scratch1, MemOperand(scratch1));
  __ cmpi(scratch1, Operand::Zero());
  __ beq(&done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ LoadP(scratch3,
             MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::STUB), r0);
    __ bne(&no_interpreter_frame);
    __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(
      scratch3,
      MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
  __ bne(&no_arguments_adaptor);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mr(fp, scratch2);
  __ LoadP(caller_args_count_reg,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ b(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count.
  __ LoadP(scratch1,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  __ LoadP(scratch1,
           FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ LoadWordArith(
      caller_args_count_reg,
      FieldMemOperand(scratch1,
                      SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  __ SmiUntag(caller_args_count_reg);
#endif

  __ bind(&formal_parameter_count_loaded);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(r4);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
  __ TestBitMask(r6, SharedFunctionInfo::kClassConstructorBits, r0);
  __ bne(&class_constructor, cr0);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ andi(r0, r6, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
                          (1 << SharedFunctionInfo::kNativeBit)));
  __ bne(&done_convert, cr0);
  {
    // ----------- S t a t e -------------
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r4 : the function to call (checked to be a JSFunction)
    //  -- r5 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r6);
    } else {
      Label convert_to_object, convert_receiver;
      __ ShiftLeftImm(r6, r3, Operand(kPointerSizeLog2));
      __ LoadPX(r6, MemOperand(sp, r6));
      __ JumpIfSmi(r6, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE);
      __ bge(&done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(r6, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(r6, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(r6);
        }
        __ b(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(r3);
        __ Push(r3, r4);
        __ mr(r3, r6);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ mr(r6, r3);
        __ Pop(r3, r4);
        __ SmiUntag(r3);
      }
      __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
    __ StorePX(r6, MemOperand(sp, r7));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSFunction)
  //  -- r5 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  __ LoadWordArith(
      r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  __ SmiUntag(r5);
#endif
  ParameterCount actual(r3);
  ParameterCount expected(r5);
  __ InvokeFunctionCode(r4, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
    __ push(r4);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
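
// Receiver conversion above follows ES6 [[Call]] for sloppy-mode functions
// (illustrative summary): a receiver that is already a JSReceiver is kept as
// is, undefined and null are replaced by the global proxy, other primitives
// are boxed with ToObject, and strict-mode or native functions skip the
// conversion entirely.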


namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : target (checked to be a JSBoundFunction)
  //  -- r6 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into r5 and length of that into r7.
  Label no_bound_arguments;
  __ LoadP(r5, FieldMemOperand(r4, JSBoundFunction::kBoundArgumentsOffset));
  __ LoadP(r7, FieldMemOperand(r5, FixedArray::kLengthOffset));
  __ SmiUntag(r7, SetRC);
  __ beq(&no_bound_arguments, cr0);
  {
    // ----------- S t a t e -------------
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r4 : target (checked to be a JSBoundFunction)
    //  -- r5 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- r6 : new.target (only in case of [[Construct]])
    //  -- r7 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ mr(r9, sp);  // preserve previous stack pointer
      __ ShiftLeftImm(r10, r7, Operand(kPointerSizeLog2));
      __ sub(sp, sp, r10);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
      __ bgt(&done);  // Signed comparison.
      // Restore the stack pointer.
      __ mr(sp, r9);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Relocate arguments down the stack.
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r9 : the previous stack pointer
    //  -- r10: the size of the [[BoundArguments]]
    {
      Label skip, loop;
      __ li(r8, Operand::Zero());
      __ cmpi(r3, Operand::Zero());
      __ beq(&skip);
      __ mtctr(r3);
      __ bind(&loop);
      __ LoadPX(r0, MemOperand(r9, r8));
      __ StorePX(r0, MemOperand(sp, r8));
      __ addi(r8, r8, Operand(kPointerSize));
      __ bdnz(&loop);
      __ bind(&skip);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ add(r5, r5, r10);
      __ mtctr(r7);
      __ bind(&loop);
      __ LoadPU(r0, MemOperand(r5, -kPointerSize));
      __ StorePX(r0, MemOperand(sp, r8));
      __ addi(r8, r8, Operand(kPointerSize));
      __ bdnz(&loop);
      __ add(r3, r3, r7);
    }
  }
  __ bind(&no_bound_arguments);
}

}  // namespace
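
// Illustrative stack effect of Generate_PushBoundArguments: for a bound
// function with [[BoundArguments]] [b1 .. bK] and a pending call with
// arguments [a1 .. aN], the stack goes from
//
//   receiver, a1 .. aN            (aN nearest to sp)
// to
//   receiver, b1 .. bK, a1 .. aN
//
// and the argument count in r3 is bumped from N to N + K.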


// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(r4);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  // Patch the receiver to [[BoundThis]].
  __ LoadP(ip, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset));
  __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
  __ StorePX(ip, MemOperand(sp, r0));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ LoadP(r4,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                       masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(r4, &non_callable);
  __ bind(&non_smi);
  __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&non_callable, cr0);

  __ cmpi(r8, Operand(JS_PROXY_TYPE));
  __ bne(&non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(r4);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ addi(r3, r3, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
  __ StorePX(r4, MemOperand(sp, r8));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r4);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
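
// Dispatch performed by Generate_Call, summarized (illustrative):
//
//   JSFunction             -> CallFunction builtin
//   JSBoundFunction        -> CallBoundFunction builtin
//   JSProxy                -> %JSProxyCall runtime function
//   other callable object  -> CALL_AS_FUNCTION_DELEGATE via CallFunction
//   not callable           -> %ThrowCalledNonCallable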


// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the constructor to call (checked to be a JSFunction)
  //  -- r6 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(r4);

  // Calling convention for function specific ConstructStubs requires
  // r5 to contain either an AllocationSite or undefined.
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset));
  __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSBoundFunction)
  //  -- r6 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(r4);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  Label skip;
  __ cmp(r4, r6);
  __ bne(&skip);
  __ LoadP(r6,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ bind(&skip);

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ LoadP(r4,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the constructor to call (checked to be a JSProxy)
  //  -- r6 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(r4, r6);
  // Include the pushed new_target, constructor and the receiver.
  __ addi(r3, r3, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}


// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the constructor to call (can be any Object)
  //  -- r6 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(r4, &non_constructor);

  // Dispatch based on instance type.
  __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  __ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r5, Map::kIsConstructor, r0);
  __ beq(&non_constructor, cr0);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ cmpi(r8, Operand(JS_PROXY_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
    __ StorePX(r4, MemOperand(sp, r8));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r4);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
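
// Dispatch performed by Generate_Construct, summarized (illustrative):
//
//   JSFunction                     -> ConstructFunction builtin
//   JSBoundFunction                -> ConstructBoundFunction builtin
//   JSProxy                        -> %JSProxyConstruct runtime function
//   other object w/ [[Construct]]  -> CALL_AS_CONSTRUCTOR_DELEGATE
//   everything else                -> ConstructedNonConstructable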


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  //  -- r6 : new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  __ cmp(r3, r5);
  __ blt(&too_few);
  __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ beq(&dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r3 and copy end address into r7.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);
    // adjust for return address and receiver
    __ addi(r3, r3, Operand(2 * kPointerSize));
    __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
    __ sub(r7, r3, r7);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // r7: copy end address
    // ip: code entry to call

    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r3, 0));
    __ push(r0);
    __ cmp(r3, r7);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }

  {  // Too few parameters: Actual < expected
    __ bind(&too_few);

    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r3; the copy end address is fp.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
    __ push(r0);
    __ cmp(r3, fp);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
    __ sub(r7, fp, r7);
    // Adjust for frame.
    __ subi(r7, r7, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ cmp(sp, r7);
    __ bne(&fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mr(r3, r5);
  // r3 : expected number of arguments
  // r4 : function (passed through to callee)
  // r6 : new target (passed through to callee)
  __ CallJSEntry(ip);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ blr();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ JumpToJSEntry(ip);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}
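
// Illustrative summary of the adaptation above: with actual argument count A
// and expected count E,
//
//   A >= E: copy the receiver and the first E arguments into the new frame
//           (surplus arguments stay in the caller's frame and are dropped
//           when the adaptor returns);
//   A <  E: copy the receiver and all A arguments, then pad with undefined
//           until E arguments are on the stack.
//
// The adaptor frame records A so LeaveArgumentsAdaptorFrame can pop the
// right number of slots when the callee returns.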


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC