// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)

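// Reader's note: in this PPC port, r3 carries the argument count and the
// return value, r4 the target function, r6 the new target, cp the context,
// sp the stack pointer, lr the return address, and ip a scratch register
// used to hold code entry points for indirect jumps. The sp[4 * n] offsets
// in the state comments assume 4-byte pointers; PPC64 builds use 8-byte
// slots via kPointerSize.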

void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments excluding receiver
  //  -- r4                 : target
  //  -- r6                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(r4);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // Insert extra arguments.
  int num_extra_args = 0;
  switch (extra_args) {
    case BuiltinExtraArguments::kTarget:
      __ Push(r4);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kNewTarget:
      __ Push(r6);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kTargetAndNewTarget:
      __ Push(r4, r6);
      num_extra_args += 2;
      break;
    case BuiltinExtraArguments::kNone:
      break;
  }

  // JumpToExternalReference expects r3 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addi(r3, r3, Operand(num_extra_args + 1));

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}


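// Reader's note: InternalArray is the spec-invisible array variant used by
// V8's self-hosted internals; it follows the same construction path as the
// user-visible Array below, but with its own initial map and stub.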
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ mr(r6, r4);
  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments
  //  -- lr                 : return address
  //  -- sp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- sp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Condition const cond_done = (kind == MathMaxMinKind::kMin) ? lt : gt;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;
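  // Reader's note: |reg| is the operand tested for -0 in the equal case
  // below. For kMin the incoming value (d2) is tested: if it is -0 it must
  // win over a +0 accumulator. For kMax the accumulator (d1) is tested: if
  // it is -0, an equal-comparing +0 must replace it.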

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r4 and the double value in d1.
  __ LoadRoot(r4, root_index);
  __ lfd(d1, FieldMemOperand(r4, HeapNumber::kValueOffset));

  // Setup state for loop
  // r5: address of arg[0] + kPointerSize
  // r6: number of slots to drop at exit (arguments + receiver)
  __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
  __ add(r5, sp, r5);
  __ addi(r6, r3, Operand(1));

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ cmpl(r5, sp);
    __ ble(&done_loop);

    // Load the next parameter tagged value into r3.
    __ LoadPU(r3, MemOperand(r5, -kPointerSize));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumberStub if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r3, &convert_smi);
    __ LoadP(r7, FieldMemOperand(r3, HeapObject::kMapOffset));
    __ JumpIfRoot(r7, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumberStub to convert it.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r6);
      __ Push(r4, r5, r6);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Pop(r4, r5, r6);
      __ SmiUntag(r6);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
        __ SmiToDouble(d1, r4);
        __ JumpIfSmi(r4, &done_restore);
        __ lfd(d1, FieldMemOperand(r4, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ lfd(d2, FieldMemOperand(r3, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r3);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left hand
    // side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ fcmpu(d1, d2);
    __ bunordered(&compare_nan);
    __ b(cond_done, &loop);
    __ b(CommuteCondition(cond_done), &compare_swap);

    // Left and right hand side are equal, check for -0 vs. +0.
    __ TestDoubleIsMinusZero(reg, r7, r8);
    __ bne(&loop);

    // Update accumulator. Result is on the right hand side.
    __ bind(&compare_swap);
    __ fmr(d1, d2);
    __ mr(r4, r3);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    // We still need to visit the rest of the arguments.
    __ bind(&compare_nan);
    __ LoadRoot(r4, Heap::kNanValueRootIndex);
    __ lfd(d1, FieldMemOperand(r4, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  __ mr(r3, r4);
  __ Drop(r6);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r3 and get rid of the rest (including the
  //    receiver).
  Label no_arguments;
  {
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
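    // LoadPUX (load with update, indexed) reads the first argument from
    // sp + (argc - 1) * kPointerSize and leaves sp pointing at that slot;
    // the Drop(2) below then pops that slot and the receiver.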
    __ LoadPUX(r3, MemOperand(sp, r3));
    __ Drop(2);
  }

  // 2a. Convert the first argument to a number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  __ Ret(1);
}


// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- r6                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // 2. Load the first argument into r5 and get rid of the rest (including the
  //    receiver).
  {
    Label no_arguments, done;
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
    __ LoadPUX(r5, MemOperand(sp, r5));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadSmiLiteral(r5, Smi::FromInt(0));
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r5 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r5, &done_convert);
    __ CompareObjectType(r5, r7, r7, HEAP_NUMBER_TYPE);
    __ beq(&done_convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r4, r6);
      __ mr(r3, r5);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mr(r5, r3);
      __ Pop(r4, r6);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(r4, r6);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r5);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r5);
  }
  __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0);
  __ Ret();
}


// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r3 and get rid of the rest (including the
  //    receiver).
  Label no_arguments;
  {
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
    __ LoadPUX(r3, MemOperand(sp, r3));
    __ Drop(2);
  }

  // 2a. At least one argument, return r3 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r3, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(r3, r4, r4, FIRST_NONSTRING_TYPE);
    __ bgt(&to_string);
    __ beq(&symbol_descriptive_string);
    __ Ret();
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r3, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r3 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in r3 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(r3);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}


// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- r6                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // 2. Load the first argument into r5 and get rid of the rest (including the
  //    receiver).
  {
    Label no_arguments, done;
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
    __ LoadPUX(r5, MemOperand(sp, r5));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r5, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r5 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r5, &convert);
    __ CompareObjectType(r5, r7, r7, FIRST_NONSTRING_TYPE);
    __ blt(&done_convert);
    __ bind(&convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(r4, r6);
      __ mr(r3, r5);
      __ CallStub(&stub);
      __ mr(r5, r3);
      __ Pop(r4, r6);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(r4, r6);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r5);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r5);
  }
  __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0);
  __ Ret();
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (preserved for callee)
  //  -- r4 : target function (preserved for callee)
  //  -- r6 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(r3);
    __ Push(r3, r4, r6, r4);

    __ CallRuntime(function_id, 1);
    __ mr(r5, r3);

    // Restore target function and new target.
    __ Pop(r3, r4, r6);
    __ SmiUntag(r3);
  }
  __ addi(ip, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmpl(sp, ip);
  __ bge(&ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- r4     : constructor function
  //  -- r5     : allocation site or undefined
  //  -- r6     : new target
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r5, r7);

    if (!create_implicit_receiver) {
      __ SmiTag(r7, r3, SetRC);
      __ Push(r5, r7);
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    } else {
      __ SmiTag(r3);
      __ Push(r5, r3);

      // Allocate the new receiver object.
      __ Push(r4, r6);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mr(r7, r3);
      __ Pop(r4, r6);

      // ----------- S t a t e -------------
      //  -- r4: constructor function
      //  -- r6: new target
      //  -- r7: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ LoadP(r3, MemOperand(sp));
      __ SmiUntag(r3, SetRC);

      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(r7, r7);
    }

    // Set up pointer to last argument.
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r3: number of arguments
    // r4: constructor function
    // r5: address of last argument (caller sp)
    // r6: new target
    // cr0: condition indicating whether r3 is zero
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ beq(&no_args, cr0);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ sub(sp, sp, ip);
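    // The mtctr/bdnz pair below forms the PPC counted loop: mtctr loads the
    // count register, and each bdnz decrements it and branches while it is
    // still non-zero.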
    __ mtctr(r3);
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r5, ip));
    __ StorePX(r0, MemOperand(sp, ip));
    __ bdnz(&loop);
    __ bind(&no_args);

    // Call the function.
    // r3: number of arguments
    // r4: constructor function
    // r6: new target
    if (is_api_function) {
      __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
      Handle<Code> code = masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(r3);
      __ InvokeFunction(r4, r6, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r3: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r3: result
      // sp[0]: receiver
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(r3, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r3, r4, r6, FIRST_JS_RECEIVER_TYPE);
      __ bge(&exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ LoadP(r3, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r3: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ LoadP(r4, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r3, &dont_throw);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5);
  }
  __ blr();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}


void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ push(r4);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}


enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
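// Reader's note: callers pass argc either as a raw machine integer or as a
// smi; in the smi case SmiToPtrArrayOffset converts the tagged count
// directly into a byte offset, folding the smi untag and the pointer-size
// scaling into a single shift.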


// Clobbers r5; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiToPtrArrayOffset(r0, argc);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2));
  }
  __ cmp(r5, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r3: new.target
  // r4: function
  // r5: receiver
  // r6: argc
  // r7: argv
  // r0,r8-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ li(cp, Operand::Zero());

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ LoadP(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r4, r5);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r5.
    Generate_CheckStackOverflow(masm, r6, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // r4: function
    // r6: argc
    // r7: argv, i.e. points to first arg
    Label loop, entry;
    __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
    __ add(r5, r7, r0);
    // r5 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ LoadP(r8, MemOperand(r7));  // read next parameter
    __ addi(r7, r7, Operand(kPointerSize));
    __ LoadP(r0, MemOperand(r8));  // dereference handle
    __ push(r0);  // push parameter
    __ bind(&entry);
    __ cmp(r7, r5);
    __ bne(&loop);

    // Setup new.target and argc.
    __ mr(r7, r3);
    __ mr(r3, r6);
    __ mr(r6, r7);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
    __ mr(r14, r7);
    __ mr(r15, r7);
    __ mr(r16, r7);
    __ mr(r17, r7);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ blr();

  // r3: result
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r4: the JS function object being called.
//   o r6: the new target
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushFixedFrame(r4);
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Get the bytecode array from the function object and load the pointer to the
  // first entry into kInterpreterBytecodeRegister.
  __ LoadP(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  Label array_done;
  Register debug_info = r5;
  DCHECK(!debug_info.is(r3));
  __ LoadP(debug_info,
           FieldMemOperand(r3, SharedFunctionInfo::kDebugInfoOffset));
  // Load original bytecode array or the debug copy.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpSmiLiteral(debug_info, DebugInfo::uninitialized(), r0);
  __ beq(&array_done);
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ bind(&array_done);

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Push new.target, bytecode array and zero for bytecode array offset.
  __ li(r3, Operand::Zero());
  __ Push(r6, kInterpreterBytecodeArrayRegister, r3);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ lwz(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r6, sp, r5);
    __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
    __ cmpl(r6, r0);
    __ bge(&ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop, no_args;
    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2), SetRC);
    __ beq(&no_args, cr0);
    __ mtctr(r5);
    __ bind(&loop);
    __ push(r6);
    __ bdnz(&loop);
    __ bind(&no_args);
  }

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Code aging of the BytecodeArray object.

  // Load accumulator, register file, bytecode offset, dispatch table into
  // registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ addi(kInterpreterRegisterFileRegister, fp,
          Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

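  // Reader's note on the dispatch sequence below: the current bytecode is a
  // byte-sized index into a table of code objects, so it is scaled by
  // kPointerSize before the table load, and the handler's first instruction
  // sits Code::kHeaderSize past the start of the (tagged) code object.
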
  // Dispatch to the first bytecode handler for the function.
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  // TODO(rmcilroy): Make dispatch table point to code entries to avoid
  // untagging and header removal.
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(ip);

  // Even though the first bytecode handler was called, we will never return.
  __ Abort(kUnexpectedReturnFromBytecodeHandler);
}


void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in accumulator, which is already in r3.

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments and return.
  __ lwz(r0, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                             BytecodeArray::kParameterSizeOffset));
  __ add(sp, sp, r0);
  __ blr();
}


static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
                                         Register count, Register scratch) {
  Label loop;
  __ addi(index, index, Operand(kPointerSize));  // Bias up for LoadPU
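  // LoadPU is a load with pre-update: each iteration steps index back one
  // slot before loading, so biasing index up by one slot first makes the
  // initial load read the first argument.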
  __ mtctr(count);
  __ bind(&loop);
  __ LoadPU(scratch, MemOperand(index, -kPointerSize));
  __ push(scratch);
  __ bdnz(&loop);
}


// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r5 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r4 : the target to call (can be any Object).
  // -----------------------------------

  // Calculate number of arguments (add one for receiver).
  __ addi(r6, r3, Operand(1));

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r5, r6, r7);

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}


// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (not including receiver)
  //  -- r6 : new target
  //  -- r4 : constructor to call
  //  -- r5 : address of the first argument
  // -----------------------------------

  // Push a slot for the receiver to be constructed.
  __ li(r0, Operand::Zero());
  __ push(r0);

  // Push the arguments (skip if none).
  Label skip;
  __ cmpi(r3, Operand::Zero());
  __ beq(&skip);
  Generate_InterpreterPushArgs(masm, r5, r3, r7);
  __ bind(&skip);

  // Call the constructor with r3, r4, and r6 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}


static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) {
  // Initialize register file register and dispatch table register.
  __ addi(kInterpreterRegisterFileRegister, fp,
          Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the context from the frame.
  __ LoadP(kContextRegister,
           MemOperand(kInterpreterRegisterFileRegister,
                      InterpreterFrameConstants::kContextFromRegisterPointer));

  // Get the bytecode array pointer from the frame.
  __ LoadP(
      kInterpreterBytecodeArrayRegister,
      MemOperand(kInterpreterRegisterFileRegister,
                 InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r4, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(
               kInterpreterRegisterFileRegister,
               InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(ip);
}


static void Generate_InterpreterNotifyDeoptimizedHelper(
    MacroAssembler* masm, Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ LoadSmiLiteral(r4, Smi::FromInt(static_cast<int>(type)));
    __ Push(r4);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // Drop state (we don't use these for interpreter deopts) and pop the
  // accumulator value into the accumulator register.
  __ Drop(1);
  __ Pop(kInterpreterAccumulatorRegister);

  // Enter the bytecode dispatch.
  Generate_EnterBytecodeDispatch(masm);
}


void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the address of the interpreter entry trampoline as a return address.
  // This simulates the initial call to bytecode handlers in interpreter entry
  // trampoline. The return will never actually be taken, but our stack walker
  // uses this address to determine whether a frame is interpreted.
  __ mov(r0,
         Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));
  __ mtlr(r0);

  Generate_EnterBytecodeDispatch(masm);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   r6 - new target
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);
  __ Jump(ip);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   r6 - new target
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);

  // Perform prologue operations usually performed by the young code stub.
  __ PushFixedFrame(r4);
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength));
  __ Jump(r3);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ addi(sp, sp, Operand(kPointerSize));  // Ignore state
  __ blr();                                // Jump to miss handler
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
    __ push(r3);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> r9.
  __ LoadP(r9, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r9);
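  // Reader's note: r9 now holds the full-codegen state marker. NO_REGISTERS
  // means there is no live result value, so only the marker is popped;
  // TOS_REG means the value of the top-of-stack register (r3) was saved in
  // the next stack slot and is reloaded before both slots are dropped.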
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmpi(r9, Operand(FullCodeGenerator::NO_REGISTERS));
  __ bne(&with_tos_register);
  __ addi(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
  __ cmpi(r9, Operand(FullCodeGenerator::TOS_REG));
  __ bne(&unknown_state);
  __ addi(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


// Clobbers registers {r7, r8, r9, r10}.
void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                             Register function_template_info,
                             Label* receiver_check_failed) {
  Register signature = r7;
  Register map = r8;
  Register constructor = r9;
  Register scratch = r10;

  // If there is no signature, return the holder.
  __ LoadP(signature, FieldMemOperand(function_template_info,
                                      FunctionTemplateInfo::kSignatureOffset));
  Label receiver_check_passed;
  __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
                &receiver_check_passed);

  // Walk the prototype chain.
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, scratch, scratch);
  __ cmpi(scratch, Operand(JS_FUNCTION_TYPE));
  Label next_prototype;
  __ bne(&next_prototype);
  Register type = constructor;
  __ LoadP(type,
           FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(type,
           FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ cmp(signature, type);
  __ beq(&receiver_check_passed);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE);
  __ bne(&next_prototype);

  // Otherwise load the parent function template and iterate.
  __ LoadP(type,
           FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ b(&function_template_loop);

  // Load the next prototype.
  __ bind(&next_prototype);
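  // Reader's note: only hidden prototypes (used by API objects and global
  // proxies) are walked; if the map's HasHiddenPrototype bit is clear, the
  // chain ends here and the receiver check fails.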
  __ lwz(scratch, FieldMemOperand(map, Map::kBitField3Offset));
  __ DecodeField<Map::HasHiddenPrototype>(scratch, SetRC);
  __ beq(receiver_check_failed, cr0);

  __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ b(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}


void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments excluding receiver
  //  -- r4                 : callee
  //  -- lr                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------


  // Load the FunctionTemplateInfo.
  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ ShiftLeftImm(r11, r3, Operand(kPointerSizeLog2));
  __ LoadPX(r5, MemOperand(sp, r11));
  CompatibleReceiverCheck(masm, r5, r6, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ LoadP(r7, FieldMemOperand(r6, FunctionTemplateInfo::kCallCodeOffset));
  __ LoadP(r7, FieldMemOperand(r7, CallHandlerInfo::kFastHandlerOffset));
  __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver);
  __ addi(r11, r11, Operand(kPointerSize));
  __ add(sp, sp, r11);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r3);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
  __ bne(&skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));

  {
    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start

    if (FLAG_enable_embedded_constant_pool) {
      __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ LoadP(r4, FieldMemOperand(
                     r4, FixedArray::OffsetOfElementAt(
                             DeoptimizationInputData::kOsrPcOffsetIndex)));
    __ SmiUntag(r4);
1446
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001447 // Compute the target address = code start + osr_offset
1448 __ add(r0, r3, r4);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001449
1450 // And "return" to the OSR entry point of the function.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001451 __ mtlr(r0);
1452 __ blr();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001453 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001454}
1455
1456
1457void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1458 // We check the stack limit as indicator that recompilation might be done.
1459 Label ok;
1460 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
1461 __ cmpl(sp, ip);
1462 __ bge(&ok);
1463 {
1464 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001465 __ CallRuntime(Runtime::kStackGuard);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001466 }
1467 __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
1468 RelocInfo::CODE_TARGET);
1469
1470 __ bind(&ok);
1471 __ Ret();
1472}
1473
1474
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001475// static
1476void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
1477 int field_index) {
1478 // ----------- S t a t e -------------
1479 // -- lr : return address
1480 // -- sp[0] : receiver
1481 // -----------------------------------
1482
1483 // 1. Pop receiver into r3 and check that it's actually a JSDate object.
1484 Label receiver_not_date;
1485 {
1486 __ Pop(r3);
1487 __ JumpIfSmi(r3, &receiver_not_date);
1488 __ CompareObjectType(r3, r4, r5, JS_DATE_TYPE);
1489 __ bne(&receiver_not_date);
1490 }
1491
1492 // 2. Load the specified date field, falling back to the runtime as necessary.
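  //
  // A descriptive note on the date cache: the isolate keeps a global stamp
  // that is bumped whenever the timezone/DST data changes. Each JSDate caches
  // computed fields together with the stamp that was current when they were
  // cached; if the stamps below still match, the cached field is valid and is
  // returned directly, otherwise we fall back to the C++ helper.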
  if (field_index == JSDate::kDateValue) {
    __ LoadP(r3, FieldMemOperand(r3, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      Label stamp_mismatch;
      __ mov(r4, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
      __ LoadP(r4, MemOperand(r4));
      __ LoadP(ip, FieldMemOperand(r3, JSDate::kCacheStampOffset));
      __ cmp(r4, ip);
      __ bne(&stamp_mismatch);
      __ LoadP(r3, FieldMemOperand(
                       r3, JSDate::kValueOffset + field_index * kPointerSize));
      __ Ret();
      __ bind(&stamp_mismatch);
    }
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, r4);
    __ LoadSmiLiteral(r4, Smi::FromInt(field_index));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  __ TailCallRuntime(Runtime::kThrowNotDateError);
}


// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : argc
  // -- sp[0] : argArray
  // -- sp[4] : thisArg
  // -- sp[8] : receiver
  // -----------------------------------
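
  // Roughly, this implements the following JS-level behaviour (an
  // illustrative sketch only):
  //
  //   Function.prototype.apply = function (thisArg, argArray) {
  //     if (argArray === null || argArray === undefined) {
  //       return this.call(thisArg);              // step 4b below
  //     }
  //     return this.call(thisArg, ...argArray);   // step 4a below
  //   };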

  // 1. Load receiver into r4, argArray into r3 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ mr(scratch, r3);
    __ LoadP(r4, MemOperand(new_sp, 0));  // receiver
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize));  // thisArg
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize));  // argArray
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  // -- r3 : argArray
  // -- r4 : receiver
  // -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(r4, &receiver_not_callable);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&receiver_not_callable, cr0);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(r3, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ li(r3, Operand::Zero());
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}


// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r3: actual number of arguments
  {
    Label done;
    __ cmpi(r3, Operand::Zero());
    __ bne(&done);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ addi(r3, r3, Operand(1));
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  // r3: actual number of arguments
  __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
  __ LoadPX(r4, MemOperand(sp, r5));

  // 3. Shift arguments one slot down on the stack (overwriting the original
  // receiver). Adjust argument count to make the original first argument the
  // new receiver.
  // r3: actual number of arguments
  // r4: callable
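  //
  // Illustrative (a sketch for argc == 2; sp[0] is the last argument):
  //
  //   before: sp[0]: arg2, sp[4]: arg1, sp[8]: receiver (the callable), r3 = 2
  //   after:  sp[0]: arg2, sp[4]: arg1 (the new receiver),              r3 = 1
  //
  // with the callable itself already saved in r4 by step 2.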
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r5, sp, r5);

    __ mtctr(r3);
    __ bind(&loop);
    __ LoadP(ip, MemOperand(r5, -kPointerSize));
    __ StoreP(ip, MemOperand(r5));
    __ subi(r5, r5, Operand(kPointerSize));
    __ bdnz(&loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ subi(r3, r3, Operand(1));
    __ pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : argc
  // -- sp[0] : argumentsList
  // -- sp[4] : thisArgument
  // -- sp[8] : target
  // -- sp[12] : receiver
  // -----------------------------------
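
  // This implements ES6 Reflect.apply(target, thisArgument, argumentsList);
  // e.g. Reflect.apply(Math.max, undefined, [1, 2, 3]) yields 3
  // (illustrative).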

  // 1. Load target into r4 (if present), argumentsList into r3 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mr(scratch, r4);
    __ mr(r3, r4);
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ beq(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize));  // thisArgument
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 3 * -kPointerSize));  // argumentsList
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  // -- r3 : argumentsList
  // -- r4 : target
  // -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(r4, &target_not_callable);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&target_not_callable, cr0);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : argc
  // -- sp[0] : new.target (optional)
  // -- sp[4] : argumentsList
  // -- sp[8] : target
  // -- sp[12] : receiver
  // -----------------------------------
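
  // This implements ES6 Reflect.construct(target, argumentsList[, newTarget]);
  // e.g. Reflect.construct(Date, [2016, 0, 1]) constructs a Date
  // (illustrative).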

  // 1. Load target into r4 (if present), argumentsList into r3 (if present),
  // new.target into r6 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mr(r3, r4);
    __ mr(r6, r4);
    __ StoreP(r4, MemOperand(new_sp, 0));  // receiver (undefined)
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ mr(r6, r4);  // new.target defaults to target
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize));  // argumentsList
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r6, MemOperand(new_sp, 3 * -kPointerSize));  // new.target
    __ bind(&skip);
    __ mr(sp, new_sp);
  }

  // ----------- S t a t e -------------
  // -- r3 : argumentsList
  // -- r6 : new.target
  // -- r4 : target
  // -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(r4, &target_not_constructor);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsConstructor, r0);
  __ beq(&target_not_constructor, cr0);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(r6, &new_target_not_constructor);
  __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsConstructor, r0);
  __ beq(&new_target_not_constructor, cr0);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ StoreP(r6, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  // -- r3 : actual number of arguments
  // -- r4 : function (passed through to callee)
  // -- r5 : expected number of arguments
  // -- r6 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
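  // In effect (an illustrative restatement of the code below):
  //
  //   if (sp - real_stack_limit <= expected_args * kPointerSize)
  //     goto stack_overflow;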
  __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
  // Make r8 the space we have left. The stack might already be overflowed
  // here which will cause r8 to become negative.
  __ sub(r8, sp, r8);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
  __ cmp(r8, r0);
  __ ble(stack_overflow);  // Signed comparison.
}

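// The adaptor frame built below has the following layout (a descriptive
// sketch; the constant pool slot is only present when
// FLAG_enable_embedded_constant_pool is set):
//
//   [ saved lr                                ]
//   [ saved fp                                ]  <- fp points just above this
//   [ constant pool                           ]
//   [ ARGUMENTS_ADAPTOR frame marker (a Smi)  ]
//   [ function (r4)                           ]
//   [ actual argument count (r3, as a Smi)    ]  <- sp after entry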
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r3);
  __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ mflr(r0);
  __ push(r0);
  if (FLAG_enable_embedded_constant_pool) {
    __ Push(fp, kConstantPoolRegister, r7, r4, r3);
  } else {
    __ Push(fp, r7, r4, r3);
  }
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                          kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then drop the parameters from the stack.
  __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                                kPointerSize)));
  int stack_adjustment = kPointerSize;  // adjust for receiver
  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
  __ SmiToPtrArrayOffset(r0, r4);
  __ add(sp, sp, r0);
}


// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : argumentsList
  // -- r4 : target
  // -- r6 : new.target (checked to be constructor or undefined)
  // -- sp[0] : thisArgument
  // -----------------------------------
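
  // This is the common tail of Function.prototype.apply, Reflect.apply and
  // Reflect.construct: it flattens argumentsList into actual stack arguments
  // and then dispatches to Call (new.target is undefined) or Construct.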

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(r3, &create_runtime);

    // Load the map of argumentsList into r5.
    __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));

    // Load native context into r7.
    __ LoadP(r7, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ LoadP(ip, ContextMemOperand(r7, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r5);
    __ beq(&create_arguments);
    __ LoadP(ip, ContextMemOperand(r7, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r5);
    __ beq(&create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(r5, ip, JS_ARRAY_TYPE);
    __ beq(&create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r4, r6, r3);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(r4, r6);
      __ LoadP(r5, FieldMemOperand(r3, FixedArray::kLengthOffset));
      __ SmiUntag(r5);
    }
    __ b(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ LoadP(r5, FieldMemOperand(r3, JSArgumentsObject::kLengthOffset));
    __ LoadP(r7, FieldMemOperand(r3, JSObject::kElementsOffset));
    __ LoadP(ip, FieldMemOperand(r7, FixedArray::kLengthOffset));
    __ cmp(r5, ip);
    __ bne(&create_runtime);
    __ SmiUntag(r5);
    __ mr(r3, r7);
    __ b(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ lbz(r5, FieldMemOperand(r5, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(r5);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ cmpi(r5, Operand(FAST_ELEMENTS));
    __ bgt(&create_runtime);
    __ cmpi(r5, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ beq(&create_runtime);
    __ LoadP(r5, FieldMemOperand(r3, JSArray::kLengthOffset));
    __ LoadP(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
    __ SmiUntag(r5);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
    // Make ip the space we have left. The stack might already be overflowed
    // here which will cause ip to become negative.
    __ sub(ip, sp, ip);
    // Check if the arguments will overflow the stack.
    __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
    __ cmp(ip, r0);  // Signed comparison.
    __ bgt(&done);
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  // -- r4 : target
  // -- r3 : args (a FixedArray built from argumentsList)
  // -- r5 : len (number of elements to push from args)
  // -- r6 : new.target (checked to be constructor or undefined)
  // -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label loop, no_args;
    __ cmpi(r5, Operand::Zero());
    __ beq(&no_args);
    __ addi(r3, r3,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    __ mtctr(r5);
    __ bind(&loop);
    __ LoadPU(r0, MemOperand(r3, kPointerSize));
    __ push(r0);
    __ bdnz(&loop);
    __ bind(&no_args);
    __ mr(r3, r5);
  }

  // Dispatch to Call or Construct depending on whether new.target is
  // undefined.
  {
    __ CompareRoot(r6, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}

namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
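// In outline, the code below (a descriptive summary):
//   1. bails out if the debugger is active,
//   2. drops a possible interpreter STUB frame,
//   3. drops a possible arguments adaptor frame, taking the argument count
//      from it (otherwise from the caller's formal parameter count),
//   4. copies the prepared arguments (plus receiver) down over the dropped
//      frame(s), and
//   5. restores the caller's fp/lr so that the callee returns directly to
//      g()'s caller.
//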
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if the debugger is not active.
  Label done;
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(masm->isolate());
  __ mov(scratch1, Operand(debug_is_active));
  __ lbz(scratch1, MemOperand(scratch1));
  __ cmpi(scratch1, Operand::Zero());
  __ bne(&done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ LoadP(scratch3, MemOperand(fp, StandardFrameConstants::kMarkerOffset));
    __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::STUB), r0);
    __ bne(&no_interpreter_frame);
    __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(scratch3,
           MemOperand(scratch2, StandardFrameConstants::kContextOffset));
  __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
  __ bne(&no_arguments_adaptor);

  // Drop arguments adaptor frame and load arguments count.
  __ mr(fp, scratch2);
  __ LoadP(scratch1,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(scratch1);
  __ b(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count.
  __ LoadP(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ LoadP(scratch1,
           FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ LoadWordArith(
      scratch1,
      FieldMemOperand(scratch1,
                      SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  __ SmiUntag(scratch1);
#endif

  __ bind(&formal_parameter_count_loaded);

  // Calculate the end of destination area where we will put the arguments
  // after we drop current frame. We add kPointerSize to count the receiver
  // argument which is not included in the formal parameters count.
  Register dst_reg = scratch2;
  __ ShiftLeftImm(dst_reg, scratch1, Operand(kPointerSizeLog2));
  __ add(dst_reg, fp, dst_reg);
  __ addi(dst_reg, dst_reg,
          Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize));

  Register src_reg = scratch1;
  __ ShiftLeftImm(src_reg, args_reg, Operand(kPointerSizeLog2));
  __ add(src_reg, sp, src_reg);
  // Count receiver argument as well (not included in args_reg).
  __ addi(src_reg, src_reg, Operand(kPointerSize));

  if (FLAG_debug_code) {
    __ cmpl(src_reg, dst_reg);
    __ Check(lt, kStackAccessBelowStackPointer);
  }

  // Restore caller's frame pointer and return address now as they will be
  // overwritten by the copying loop.
  __ RestoreFrameStateForTailCall();

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).

  // Both src_reg and dst_reg are pointing to the word after the one to copy,
  // so they must be pre-decremented in the loop.
  Register tmp_reg = scratch3;
  Label loop;
  __ addi(tmp_reg, args_reg, Operand(1));  // +1 for receiver
  __ mtctr(tmp_reg);
  __ bind(&loop);
  __ LoadPU(tmp_reg, MemOperand(src_reg, -kPointerSize));
  __ StorePU(tmp_reg, MemOperand(dst_reg, -kPointerSize));
  __ bdnz(&loop);

  // Leave current frame.
  __ mr(sp, dst_reg);

  __ bind(&done);
}
}  // namespace

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(r4);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
  __ TestBitMask(r6, SharedFunctionInfo::kClassConstructorBits, r0);
  __ bne(&class_constructor, cr0);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
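  // For example (an illustrative note on ES receiver coercion in sloppy
  // mode): f.call(null) and f.call(undefined) see the global proxy as `this`,
  // while f.call(42) sees a wrapper object equivalent to new Number(42).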
  Label done_convert;
  __ andi(r0, r6, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
                          (1 << SharedFunctionInfo::kNativeBit)));
  __ bne(&done_convert, cr0);
  {
    // ----------- S t a t e -------------
    // -- r3 : the number of arguments (not including the receiver)
    // -- r4 : the function to call (checked to be a JSFunction)
    // -- r5 : the shared function info.
    // -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r6);
    } else {
      Label convert_to_object, convert_receiver;
      __ ShiftLeftImm(r6, r3, Operand(kPointerSizeLog2));
      __ LoadPX(r6, MemOperand(sp, r6));
      __ JumpIfSmi(r6, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE);
      __ bge(&done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(r6, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(r6, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(r6);
        }
        __ b(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the
        // frame in the fast case? (fall back to AllocateInNewSpace?)
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(r3);
        __ Push(r3, r4);
        __ mr(r3, r6);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ mr(r6, r3);
        __ Pop(r3, r4);
        __ SmiUntag(r3);
      }
      __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
    __ StorePX(r6, MemOperand(sp, r7));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : the function to call (checked to be a JSFunction)
  // -- r5 : the shared function info.
  // -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  __ LoadWordArith(
      r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  __ SmiUntag(r5);
#endif
  ParameterCount actual(r3);
  ParameterCount expected(r5);
  __ InvokeFunctionCode(r4, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
    __ push(r4);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}


namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : target (checked to be a JSBoundFunction)
  // -- r6 : new.target (only in case of [[Construct]])
  // -----------------------------------
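
  // Illustrative: for g = f.bind(thisArg, b0, b1), a call g(a0, a1) is
  // rearranged below so that the callee sees the argument list
  // (b0, b1, a0, a1); the receiver itself is patched by the callers of this
  // helper.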

  // Load [[BoundArguments]] into r5 and length of that into r7.
  Label no_bound_arguments;
  __ LoadP(r5, FieldMemOperand(r4, JSBoundFunction::kBoundArgumentsOffset));
  __ LoadP(r7, FieldMemOperand(r5, FixedArray::kLengthOffset));
  __ SmiUntag(r7, SetRC);
  __ beq(&no_bound_arguments, cr0);
  {
    // ----------- S t a t e -------------
    // -- r3 : the number of arguments (not including the receiver)
    // -- r4 : target (checked to be a JSBoundFunction)
    // -- r5 : the [[BoundArguments]] (implemented as FixedArray)
    // -- r6 : new.target (only in case of [[Construct]])
    // -- r7 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ mr(r9, sp);  // preserve previous stack pointer
      __ ShiftLeftImm(r10, r7, Operand(kPointerSizeLog2));
      __ sub(sp, sp, r10);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
      __ bgt(&done);  // Signed comparison.
      // Restore the stack pointer.
      __ mr(sp, r9);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Relocate arguments down the stack.
    // -- r3 : the number of arguments (not including the receiver)
    // -- r9 : the previous stack pointer
    // -- r10: the size of the [[BoundArguments]]
    {
      Label skip, loop;
      __ li(r8, Operand::Zero());
      __ cmpi(r3, Operand::Zero());
      __ beq(&skip);
      __ mtctr(r3);
      __ bind(&loop);
      __ LoadPX(r0, MemOperand(r9, r8));
      __ StorePX(r0, MemOperand(sp, r8));
      __ addi(r8, r8, Operand(kPointerSize));
      __ bdnz(&loop);
      __ bind(&skip);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ add(r5, r5, r10);
      __ mtctr(r7);
      __ bind(&loop);
      __ LoadPU(r0, MemOperand(r5, -kPointerSize));
      __ StorePX(r0, MemOperand(sp, r8));
      __ addi(r8, r8, Operand(kPointerSize));
      __ bdnz(&loop);
      __ add(r3, r3, r7);
    }
  }
  __ bind(&no_bound_arguments);
}

}  // namespace


// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(r4);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  // Patch the receiver to [[BoundThis]].
  __ LoadP(ip, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset));
  __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
  __ StorePX(ip, MemOperand(sp, r0));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ LoadP(r4,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                       masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : the target to call (can be any Object).
  // -----------------------------------
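
  // Dispatch summary (descriptive): JSFunction -> the CallFunction builtin,
  // JSBoundFunction -> the CallBoundFunction builtin, JSProxy -> runtime
  // [[Call]], any other callable -> CALL_AS_FUNCTION_DELEGATE, everything
  // else -> throw TypeError.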

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(r4, &non_callable);
  __ bind(&non_smi);
  __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&non_callable, cr0);

  __ cmpi(r8, Operand(JS_PROXY_TYPE));
  __ bne(&non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(r4);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ addi(r3, r3, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
  __ StorePX(r4, MemOperand(sp, r8));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r4);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}


// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : the constructor to call (checked to be a JSFunction)
  // -- r6 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(r4);

  // The calling convention for function-specific ConstructStubs requires
  // r5 to contain either an AllocationSite or undefined.
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset));
  __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : the function to call (checked to be a JSBoundFunction)
  // -- r6 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(r4);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  Label skip;
  __ cmp(r4, r6);
  __ bne(&skip);
  __ LoadP(r6,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ bind(&skip);

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ LoadP(r4,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : the constructor to call (checked to be a JSProxy)
  // -- r6 : the new target (either the same as the constructor or
  //         the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(r4, r6);
  // Include the pushed new_target, constructor and the receiver.
  __ addi(r3, r3, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}


// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : the constructor to call (can be any Object)
  // -- r6 : the new target (either the same as the constructor or
  //         the JSFunction on which new was invoked initially)
  // -----------------------------------
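
  // Dispatch summary (descriptive): JSFunction -> its construct stub,
  // JSBoundFunction -> ConstructBoundFunction, JSProxy -> runtime
  // [[Construct]], other objects with a [[Construct]] internal method ->
  // CALL_AS_CONSTRUCTOR_DELEGATE, everything else -> throw TypeError.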

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(r4, &non_constructor);

  // Dispatch based on instance type.
  __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  __ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r5, Map::kIsConstructor, r0);
  __ beq(&non_constructor, cr0);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ cmpi(r8, Operand(JS_PROXY_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
    __ StorePX(r4, MemOperand(sp, r8));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r4);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : actual number of arguments
  // -- r4 : function (passed through to callee)
  // -- r5 : expected number of arguments
  // -- r6 : new target (passed through to callee)
  // -----------------------------------
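
  // Illustrative: for function f(a, b, c) {} called as f(1), the adaptor
  // builds a frame in which the callee sees (1, undefined, undefined), so the
  // callee always receives the expected number of arguments. Functions whose
  // expected count is kDontAdaptArgumentsSentinel are entered directly.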

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  __ cmp(r3, r5);
  __ blt(&too_few);
  __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ beq(&dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r3 and copy end address into r7.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);
    // adjust for return address and receiver
    __ addi(r3, r3, Operand(2 * kPointerSize));
    __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
    __ sub(r7, r3, r7);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // r7: copy end address
    // ip: code entry to call

    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r3, 0));
    __ push(r0);
    __ cmp(r3, r7);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }

  {  // Too few parameters: Actual < expected
    __ bind(&too_few);

    // If the function is strong we need to throw an error.
    Label no_strong_error;
    __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ lwz(r8, FieldMemOperand(r7, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestBit(r8, SharedFunctionInfo::kStrongModeBit, r0);
    __ beq(&no_strong_error, cr0);

    // What we really care about is the required number of arguments.
    __ lwz(r7, FieldMemOperand(r7, SharedFunctionInfo::kLengthOffset));
#if V8_TARGET_ARCH_PPC64
    // See comment near kLengthOffset in src/objects.h
    __ srawi(r7, r7, kSmiTagSize);
#else
    __ SmiUntag(r7);
#endif
    __ cmp(r3, r7);
    __ bge(&no_strong_error);

    {
      FrameScope frame(masm, StackFrame::MANUAL);
      EnterArgumentsAdaptorFrame(masm);
      __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments);
    }

    __ bind(&no_strong_error);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r3; the copy end address is fp.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
    __ push(r0);
    __ cmp(r3, fp);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
    __ sub(r7, fp, r7);
    // Adjust for frame.
    __ subi(r7, r7, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ cmp(sp, r7);
    __ bne(&fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mr(r3, r5);
  // r3 : expected number of arguments
  // r4 : function (passed through to callee)
  // r6 : new target (passed through to callee)
  __ CallJSEntry(ip);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ blr();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ JumpToJSEntry(ip);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC