// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
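// As elsewhere in V8's code generators, `__` expands to ACCESS_MASM(masm):
// each `__ op(...)` statement below emits one PPC instruction (or
// macro-assembler pseudo-instruction) into the code buffer.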


void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
  // ----------- S t a t e -------------
  // -- r3 : number of arguments excluding receiver
  // -- r4 : target
  // -- r6 : new.target
  // -- sp[0] : last argument
  // -- ...
  // -- sp[4 * (argc - 1)] : first argument
  // -- sp[4 * argc] : receiver
  // -----------------------------------
  __ AssertFunction(r4);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // Insert extra arguments.
  const int num_extra_args = 2;
  __ Push(r4, r6);

  // JumpToExternalReference expects r3 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addi(r3, r3, Operand(num_extra_args + 1));

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : number of arguments
  // -- lr : return address
  // -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function (tail call a stub).
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : number of arguments
  // -- lr : return address
  // -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ mr(r6, r4);
  // Run the native code for the Array function called as a normal function
  // (tail call a stub).
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  // -- r3 : number of arguments
  // -- r4 : function
  // -- cp : context
  // -- lr : return address
  // -- sp[(argc - n) * 8] : arg[n] (zero-based)
  // -- sp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Condition const cond_done = (kind == MathMaxMinKind::kMin) ? lt : gt;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;
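  // Note: when the comparison below finds the two sides equal, -0 must still
  // be distinguished from +0 (Math.min prefers -0, Math.max prefers +0), so
  // |reg| selects which value the -0 test inspects: the incoming parameter
  // (d2) for kMin, the accumulator (d1) for kMax.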

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r8 and the double value in d1.
  __ LoadRoot(r8, root_index);
  __ lfd(d1, FieldMemOperand(r8, HeapNumber::kValueOffset));

  // Set up state for the loop.
  // r7: number of slots to drop at exit (arguments + receiver)
  __ addi(r7, r3, Operand(1));

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters are done.
    __ subi(r3, r3, Operand(1));
    __ cmpi(r3, Operand::Zero());
    __ blt(&done_loop);

    // Load the next parameter tagged value into r5.
    __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
    __ LoadPX(r5, MemOperand(sp, r5));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r5, &convert_smi);
    __ LoadP(r6, FieldMemOperand(r5, HeapObject::kMapOffset));
    __ JumpIfRoot(r6, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ PushStandardFrame(r4);
      __ SmiTag(r3);
      __ SmiTag(r7);
      __ Push(r3, r7, r8);
      __ mr(r3, r5);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mr(r5, r3);
      __ Pop(r3, r7, r8);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
        __ SmiToDouble(d1, r8);
        __ JumpIfSmi(r8, &done_restore);
        __ lfd(d1, FieldMemOperand(r8, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
      __ SmiUntag(r7);
      __ SmiUntag(r3);
      // TODO(Jaideep): Add a macro function for PopStandardFrame.
      if (FLAG_enable_embedded_constant_pool) {
        __ Pop(r0, fp, kConstantPoolRegister, cp, r4);
      } else {
        __ Pop(r0, fp, cp, r4);
      }
      __ mtlr(r0);
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ lfd(d2, FieldMemOperand(r5, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r5);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left hand
    // side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ fcmpu(d1, d2);
    __ bunordered(&compare_nan);
    __ b(cond_done, &loop);
    __ b(CommuteCondition(cond_done), &compare_swap);

    // Left and right hand side are equal, check for -0 vs. +0.
    __ TestDoubleIsMinusZero(reg, r9, r0);
    __ bne(&loop);

    // Update accumulator. Result is on the right hand side.
    __ bind(&compare_swap);
    __ fmr(d1, d2);
    __ mr(r8, r5);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    // We still need to visit the rest of the arguments.
    __ bind(&compare_nan);
    __ LoadRoot(r8, Heap::kNanValueRootIndex);
    __ lfd(d1, FieldMemOperand(r8, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  __ mr(r3, r8);
  __ Drop(r7);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : number of arguments
  // -- r4 : constructor function
  // -- lr : return address
  // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  // -- sp[argc * 4] : receiver
  // -----------------------------------

  // 1. Load the first argument into r3 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
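    // LoadPUX (load with update, indexed) also advances sp to the slot of the
    // first argument as it loads it, so the Drop(2) below pops that slot and
    // the receiver.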
    __ LoadPUX(r3, MemOperand(sp, r3));
    __ Drop(2);
  }

  // 2a. Convert the first argument to a number.
  __ Jump(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ LoadSmiLiteral(r3, Smi::FromInt(0));
  __ Ret(1);
}


// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : number of arguments
  // -- r4 : constructor function
  // -- r6 : new target
  // -- lr : return address
  // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  // -- sp[argc * 4] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // 2. Load the first argument into r5 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
    __ LoadPUX(r5, MemOperand(sp, r5));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadSmiLiteral(r5, Smi::FromInt(0));
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r5 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r5, &done_convert);
    __ CompareObjectType(r5, r7, r7, HEAP_NUMBER_TYPE);
    __ beq(&done_convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r4, r6);
      __ mr(r3, r5);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mr(r5, r3);
      __ Pop(r4, r6);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(r4, r6);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the number.
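  // AllocateJSValue boxes the number in r5 into a fresh JSValue whose map is
  // derived from the constructor in r4 (r7 and r8 are scratch registers); it
  // bails out to new_object if inline allocation fails.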
  __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r5);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r5);
  }
  __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0);
  __ Ret();
}


// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : number of arguments
  // -- r4 : constructor function
  // -- lr : return address
  // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  // -- sp[argc * 4] : receiver
  // -----------------------------------

  // 1. Load the first argument into r3 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
    __ LoadPUX(r3, MemOperand(sp, r3));
    __ Drop(2);
  }

  // 2a. At least one argument, return r3 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r3, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(r3, r4, r4, FIRST_NONSTRING_TYPE);
    __ bgt(&to_string);
    __ beq(&symbol_descriptive_string);
    __ Ret();
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r3, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r3 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in r3 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(r3);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}


// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : number of arguments
  // -- r4 : constructor function
  // -- r6 : new target
  // -- lr : return address
  // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  // -- sp[argc * 4] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // 2. Load the first argument into r5 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
    __ LoadPUX(r5, MemOperand(sp, r5));
    __ Drop(2);
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r5, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure r5 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r5, &convert);
    __ CompareObjectType(r5, r7, r7, FIRST_NONSTRING_TYPE);
    __ blt(&done_convert);
    __ bind(&convert);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(r4, r6);
      __ mr(r3, r5);
      __ CallStub(&stub);
      __ mr(r5, r3);
      __ Pop(r4, r6);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(r4, r6);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r5);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r5);
  }
  __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0);
  __ Ret();
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  // -- r3 : argument count (preserved for callee)
  // -- r4 : target function (preserved for callee)
  // -- r6 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(r3);
    __ Push(r3, r4, r6, r4);

    __ CallRuntime(function_id, 1);
    __ mr(r5, r3);

    // Restore target function and new target.
    __ Pop(r3, r4, r6);
    __ SmiUntag(r3);
  }
  __ addi(ip, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmpl(sp, ip);
  __ bge(&ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  // -- r3 : number of arguments
  // -- r4 : constructor function
  // -- r5 : allocation site or undefined
  // -- r6 : new target
  // -- cp : context
  // -- lr : return address
  // -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r5, r7);

    if (!create_implicit_receiver) {
      __ SmiTag(r7, r3, SetRC);
      __ Push(cp, r5, r7);
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    } else {
      __ SmiTag(r3);
      __ Push(cp, r5, r3);

      // Allocate the new receiver object.
      __ Push(r4, r6);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mr(r7, r3);
      __ Pop(r4, r6);

      // ----------- S t a t e -------------
      // -- r4: constructor function
      // -- r6: new target
      // -- r7: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ LoadP(r3, MemOperand(sp));
      __ SmiUntag(r3, SetRC);

      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(r7, r7);
    }

    // Set up pointer to last argument.
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r3: number of arguments
    // r4: constructor function
    // r5: address of last argument (caller sp)
    // r6: new target
    // cr0: condition indicating whether r3 is zero
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ beq(&no_args, cr0);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ sub(sp, sp, ip);
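    // Use the PPC count register for the copy loop: mtctr seeds CTR with the
    // argument count, and bdnz decrements it and branches while it is
    // non-zero.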
    __ mtctr(r3);
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r5, ip));
    __ StorePX(r0, MemOperand(sp, ip));
    __ bdnz(&loop);
    __ bind(&no_args);

    // Call the function.
    // r3: number of arguments
    // r4: constructor function
    // r6: new target

    ParameterCount actual(r3);
    __ InvokeFunction(r4, r6, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r3: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r3: result
      // sp[0]: receiver
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(r3, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r3, r4, r6, FIRST_JS_RECEIVER_TYPE);
      __ bge(&exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ LoadP(r3, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r3: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ LoadP(r4, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi; a Smi would mean the constructor of a
  // derived class returned neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r3, &dont_throw);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5);
  }
  __ blr();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : the value to pass to the generator
  // -- r4 : the JSGeneratorObject to resume
  // -- r5 : the resume mode (tagged)
  // -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(r4);

  // Store input value into generator object.
  __ StoreP(r3, FieldMemOperand(r4, JSGeneratorObject::kInputOrDebugPosOffset),
            r0);
  __ RecordWriteField(r4, JSGeneratorObject::kInputOrDebugPosOffset, r3, r6,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kResumeModeOffset), r0);

  // Load suspended function and context.
  __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset));
  __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  STATIC_ASSERT(StepFrame > StepIn);
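  // The last step action is kept as a signed byte in the isolate's debug
  // state; any value at or above StepIn (which, by the assert above, includes
  // StepFrame) means the function must be flooded with one-shot breakpoints
  // before it is resumed.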
  __ mov(ip, Operand(last_step_action));
  __ LoadByte(ip, MemOperand(ip), r0);
  __ extsb(ip, ip);
  __ cmpi(ip, Operand(StepIn));
  __ bge(&prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.

  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());

  __ mov(ip, Operand(debug_suspended_generator));
  __ LoadP(ip, MemOperand(ip));
  __ cmp(ip, r4);
  __ beq(&prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Push receiver.
  __ LoadP(ip, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
  __ Push(ip);

  // ----------- S t a t e -------------
  // -- r4 : the JSGeneratorObject to resume
  // -- r5 : the resume mode (tagged)
  // -- r7 : generator function
  // -- cp : generator context
  // -- lr : return address
  // -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
  __ LoadWordArith(
      r3, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label loop, done_loop;
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
#if V8_TARGET_ARCH_PPC64
    __ cmpi(r3, Operand::Zero());
    __ beq(&done_loop);
#else
    __ SmiUntag(r3, SetRC);
    __ beq(&done_loop, cr0);
#endif
    __ mtctr(r3);
    __ bind(&loop);
    __ push(ip);
    __ bdnz(&loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(r6, r6, r6, BYTECODE_ARRAY_TYPE);
  __ bne(&old_generator);

  // New-style (ignition/turbofan) generator object
  {
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ mr(r6, r4);
    __ mr(r4, r7);
    __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
    __ JumpToJSEntry(ip);
  }

  // Old-style (full-codegen) generator object
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PushStandardFrame(r7);

    // Restore the operand stack.
    __ LoadP(r3, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset));
    __ LoadP(r6, FieldMemOperand(r3, FixedArray::kLengthOffset));
    __ addi(r3, r3,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    {
      Label loop, done_loop;
      __ SmiUntag(r6, SetRC);
      __ beq(&done_loop, cr0);
      __ mtctr(r6);
      __ bind(&loop);
      __ LoadPU(ip, MemOperand(r3, kPointerSize));
      __ Push(ip);
      __ bdnz(&loop);
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(ip, Heap::kEmptyFixedArrayRootIndex);
    __ StoreP(ip, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset),
              r0);

    // Resume the generator function at the continuation.
    __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kCodeOffset));
    __ addi(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
    {
      ConstantPoolUnavailableScope constant_pool_unavailable(masm);
      if (FLAG_enable_embedded_constant_pool) {
        __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r6);
      }
      __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
      __ SmiUntag(r5);
      __ add(r6, r6, r5);
      __ LoadSmiLiteral(r5,
                        Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
      __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset),
                r0);
      __ mr(r3, r4);  // Continuation expects generator object in r3.
      __ Jump(r6);
    }
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4, r5, r7);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(r4, r5);
    __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4, r5);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(r4, r5);
    __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ push(r4);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}


enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers r5; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiToPtrArrayOffset(r0, argc);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2));
  }
  __ cmp(r5, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r3: new.target
  // r4: function
  // r5: receiver
  // r6: argc
  // r7: argv
  // r0,r8-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ LoadP(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r4, r5);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r5.
    Generate_CheckStackOverflow(masm, r6, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // r4: function
    // r6: argc
    // r7: argv, i.e. points to first arg
    Label loop, entry;
    __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
    __ add(r5, r7, r0);
    // r5 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ LoadP(r8, MemOperand(r7));  // read next parameter
    __ addi(r7, r7, Operand(kPointerSize));
    __ LoadP(r0, MemOperand(r8));  // dereference handle
    __ push(r0);  // push parameter
    __ bind(&entry);
    __ cmp(r7, r5);
    __ bne(&loop);

    // Setup new.target and argc.
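    // The entry code passed new.target in r3 and argc in r6, but the Call and
    // Construct builtins expect argc in r3 and new.target in r6, hence the
    // three-way swap through r7.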
    __ mr(r7, r3);
    __ mr(r3, r6);
    __ mr(r6, r7);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
    __ mr(r14, r7);
    __ mr(r15, r7);
    __ mr(r16, r7);
    __ mr(r17, r7);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ blr();

  // r3: result
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ LoadP(args_count,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lwz(args_count,
         FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));
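  // The parameter size is a byte count (receiver included), which is why it
  // can be added to sp directly once the frame has been dropped.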

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  __ add(sp, sp, args_count);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r4: the JS function object being called.
//   o r6: the new target
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(r4);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ LoadP(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  Label array_done;
  Register debug_info = r5;
  DCHECK(!debug_info.is(r3));
  __ LoadP(debug_info,
           FieldMemOperand(r3, SharedFunctionInfo::kDebugInfoOffset));
  // Load original bytecode array or the debug copy.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpSmiLiteral(debug_info, DebugInfo::uninitialized(), r0);
  __ beq(&array_done);
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ bind(&array_done);

  // Check function data field is actually a BytecodeArray object.
  Label bytecode_array_not_present;
  __ CompareRoot(kInterpreterBytecodeArrayRegister,
                 Heap::kUndefinedValueRootIndex);
  __ beq(&bytecode_array_not_present);

  if (FLAG_debug_code) {
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Load initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r3, kInterpreterBytecodeOffsetRegister);
  __ Push(r6, kInterpreterBytecodeArrayRegister, r3);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ lwz(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r6, sp, r5);
    __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
    __ cmpl(r6, r0);
    __ bge(&ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop, no_args;
    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2), SetRC);
    __ beq(&no_args, cr0);
    __ mtctr(r5);
    __ bind(&loop);
    __ push(r6);
    __ bdnz(&loop);
    __ bind(&no_args);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
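  // The bytecode at the current offset is a byte-sized opcode that indexes
  // the dispatch table; scale it by the pointer size to fetch the handler's
  // code entry.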
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Call(ip);

  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in r3.
  LeaveInterpreterFrame(masm, r5);
  __ blr();

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ bind(&bytecode_array_not_present);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kCodeOffset));
  __ addi(r7, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(r7, FieldMemOperand(r4, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(r4, r7, r8);
  __ JumpToJSEntry(r7);
}

void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
  // Save the function and context for call to CompileBaseline.
  __ LoadP(r4, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ LoadP(kContextRegister,
           MemOperand(fp, StandardFrameConstants::kContextOffset));

  // Leave the frame before recompiling for baseline so that we don't count as
  // an activation on the stack.
  LeaveInterpreterFrame(masm, r5);

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Push return value.
    __ push(r3);

    // Push function as argument and compile for baseline.
    __ push(r4);
    __ CallRuntime(Runtime::kCompileBaseline);

    // Restore return value.
    __ pop(r3);
  }
  __ blr();
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
                                         Register count, Register scratch) {
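  // Push |count| values onto the stack, walking down from |index| via
  // LoadPU's pre-decrement addressing; |index| is biased up one slot first so
  // that the initial load hits |index| itself.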
  Label loop;
  __ addi(index, index, Operand(kPointerSize));  // Bias up for LoadPU
  __ mtctr(count);
  __ bind(&loop);
  __ LoadPU(scratch, MemOperand(index, -kPointerSize));
  __ push(scratch);
  __ bdnz(&loop);
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r5 : the address of the first argument to be pushed. Subsequent
  //         arguments should be consecutive above this, in the same order as
  //         they are to be pushed onto the stack.
  // -- r4 : the target to call (can be any Object).
  // -----------------------------------

  // Calculate number of arguments (add one for receiver).
  __ addi(r6, r3, Operand(1));

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r5, r6, r7);

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : argument count (not including receiver)
  // -- r6 : new target
  // -- r4 : constructor to call
  // -- r5 : address of the first argument
  // -----------------------------------

  // Push a slot for the receiver to be constructed.
  __ li(r0, Operand::Zero());
  __ push(r0);

  // Push the arguments (skip if none).
  Label skip;
  __ cmpi(r3, Operand::Zero());
  __ beq(&skip);
  Generate_InterpreterPushArgs(masm, r5, r3, r7);
  __ bind(&skip);

  // Call the constructor with r3, r4, and r6 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ Move(r5, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ addi(r0, r5, Operand(interpreter_entry_return_pc_offset->value() +
                          Code::kHeaderSize - kHeapObjectTag));
  __ mtlr(r0);
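  // With lr pointing just past the dispatch Call in
  // InterpreterEntryTrampoline, returning from a bytecode handler now behaves
  // exactly as if the interpreter had been entered normally.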

  // Initialize the dispatch table register.
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r4, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Jump(ip);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : argument count (preserved for callee)
  // -- r6 : new target (preserved for callee)
  // -- r4 : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = r4;
  Register map = r9;
  Register index = r5;
  __ LoadP(map,
           FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(map,
           FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ LoadP(index, FieldMemOperand(map, FixedArray::kLengthOffset));
  __ CmpSmiLiteral(index, Smi::FromInt(2), r0);
  __ blt(&gotta_call_runtime);

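  // The optimized code map is a FixedArray that bundles, per entry, the
  // native context, the OSR ast id, the literals and the cached code (plus a
  // context-independent slot at kSharedCodeIndex); it is scanned here from
  // the back.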
  // Find literals.
  // r10 : native context
  // r5 : length / index
  // r9 : optimized code map
  // r6 : new target
  // r4 : closure
  Register native_context = r10;
  __ LoadP(native_context, NativeContextMemOperand());

  __ bind(&loop_top);
  Register temp = r11;
  Register array_pointer = r8;

  // Does the native context match?
  __ SmiToPtrArrayOffset(array_pointer, index);
  __ add(array_pointer, map, array_pointer);
  __ LoadP(temp, FieldMemOperand(array_pointer,
                                 SharedFunctionInfo::kOffsetToPreviousContext));
  __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ cmp(temp, native_context);
  __ bne(&loop_bottom);
  // OSR id set to none?
  __ LoadP(temp,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ CmpSmiLiteral(temp, Smi::FromInt(bailout_id), r0);
  __ bne(&loop_bottom);
  // Literals available?
  __ LoadP(temp,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousLiterals));
  __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(temp, &gotta_call_runtime);

  // Save the literals in the closure.
  __ StoreP(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset), r0);
  __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r7,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Code available?
  Register entry = r7;
  __ LoadP(entry,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &maybe_call_runtime);

  // Found literals and code. Get them into the closure and return.
  // Store code entry in the closure.
  __ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));

  Label install_optimized_code_and_tailcall;
  __ bind(&install_optimized_code_and_tailcall);
  __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(closure, entry, r8);

  // Link the closure into the optimized function list.
  // r7 : code entry
  // r10: native context
  // r4 : closure
  __ LoadP(
      r8, ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ StoreP(r8, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset),
            r0);
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r8, temp,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ StoreP(
      closure,
      ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST), r0);
  // Save closure before the write barrier.
  __ mr(r8, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, r8, temp,
                            kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ JumpToJSEntry(entry);

  __ bind(&loop_bottom);
  __ SubSmiLiteral(index, index, Smi::FromInt(SharedFunctionInfo::kEntryLength),
                   r0);
  __ CmpSmiLiteral(index, Smi::FromInt(1), r0);
  __ bgt(&loop_top);

  // We found neither literals nor code.
  __ b(&gotta_call_runtime);

  __ bind(&maybe_call_runtime);

  // Last possibility. Check the context free optimized code map entry.
  __ LoadP(entry,
           FieldMemOperand(map, FixedArray::kHeaderSize +
                                    SharedFunctionInfo::kSharedCodeIndex));
  __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Store code entry in the closure.
  __ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ b(&install_optimized_code_and_tailcall);

  __ bind(&try_shared);
1384 // Is the full code valid?
1385 __ LoadP(entry,
1386 FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1387 __ LoadP(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
1388 __ lwz(r8, FieldMemOperand(entry, Code::kFlagsOffset));
1389 __ DecodeField<Code::KindField>(r8);
1390 __ cmpi(r8, Operand(Code::BUILTIN));
1391 __ beq(&gotta_call_runtime);
1392 // Yes, install the full code.
1393 __ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1394 __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
1395 __ RecordWriteCodeEntryField(closure, entry, r8);
1396 __ JumpToJSEntry(entry);
1397
1398 __ bind(&gotta_call_runtime);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001399 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001400}
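
// The loop above walks the optimized code map from its end towards the front
// in steps of SharedFunctionInfo::kEntryLength, roughly (illustrative
// pseudo-code only; the real layout is given by the kOffsetToPrevious*
// constants):
//   for (index = map.length(); index > 1; index -= kEntryLength) {
//     entry = the map entry ending at index;
//     if (entry.context == native_context &&
//         entry.osr_ast_id == BailoutId::None() &&
//         entry.literals weak cell has not been cleared) {
//       install entry.literals (and entry.code, if still alive) into the
//       closure and tail-call the installed code;
//     }
//   }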

void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  // r3 - contains return address (beginning of patch sequence)
  // r4 - isolate
  // r6 - new target
  // lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);
  __ Jump(ip);
}
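
// The C helper fetched via ExternalReference::get_make_code_young_function is
// invoked with the two arguments prepared above -- r3 holding the address of
// the code-age sequence and r4 the isolate. The exact C signature is an
// assumption here, inferred from PrepareCallCFunction(2, ...):
//   make_code_young(young_sequence_address /* r3 */, isolate /* r4 */);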

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  // r3 - contains return address (beginning of patch sequence)
  // r4 - isolate
  // r6 - new target
  // lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);

  // Perform prologue operations usually performed by the young code stub.
  __ PushStandardFrame(r4);

  // Jump to point after the code-age stub.
  __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength));
  __ Jump(r3);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification; this is important for compiled
    // stubs that tail call the runtime on deopts, passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ addi(sp, sp, Operand(kPointerSize));  // Ignore state
  __ blr();                                // Jump to miss handler
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
    __ push(r3);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> r9.
  __ LoadP(r9, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r9);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmpi(
      r9,
      Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::NO_REGISTERS)));
  __ bne(&with_tos_register);
  __ addi(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r3.code());
  __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
  __ cmpi(
      r9,
      Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::TOS_REGISTER)));
  __ bne(&unknown_state);
  __ addi(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}
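
// Stack layout consumed above (as left by the deoptimizer exit sequence):
//   sp[0] : bailout state (smi)
//   sp[1] : top-of-stack value, only meaningful for BailoutState::TOS_REGISTER
// NO_REGISTERS pops one slot and returns; TOS_REGISTER restores the
// accumulator (r3) from sp[1] and pops two slots.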


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


// Clobbers registers {r7, r8, r9, r10}.
void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                             Register function_template_info,
                             Label* receiver_check_failed) {
  Register signature = r7;
  Register map = r8;
  Register constructor = r9;
  Register scratch = r10;

  // If there is no signature, return the holder.
  __ LoadP(signature, FieldMemOperand(function_template_info,
                                      FunctionTemplateInfo::kSignatureOffset));
  Label receiver_check_passed;
  __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
                &receiver_check_passed);

  // Walk the prototype chain.
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, scratch, scratch);
  __ cmpi(scratch, Operand(JS_FUNCTION_TYPE));
  Label next_prototype;
  __ bne(&next_prototype);
  Register type = constructor;
  __ LoadP(type,
           FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(type,
           FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ cmp(signature, type);
  __ beq(&receiver_check_passed);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE);
  __ bne(&next_prototype);

  // Otherwise load the parent function template and iterate.
  __ LoadP(type,
           FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ b(&function_template_loop);

  // Load the next prototype.
  __ bind(&next_prototype);
  __ lwz(scratch, FieldMemOperand(map, Map::kBitField3Offset));
  __ DecodeField<Map::HasHiddenPrototype>(scratch, SetRC);
  __ beq(receiver_check_failed, cr0);

  __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ b(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}
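
// Roughly, the check above implements (illustrative pseudo-code only):
//   if (info.signature == undefined) return passed;
//   for (obj = receiver; obj's map has a hidden prototype;
//        obj = obj.map.prototype) {
//     for (t = obj.map.constructor.shared.function_data;
//          t is a FunctionTemplateInfo; t = t.parent_template) {
//       if (t == info.signature) return passed;
//     }
//   }
//   goto receiver_check_failed;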


void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : number of arguments excluding receiver
  // -- r4 : callee
  // -- lr : return address
  // -- sp[0] : last argument
  // -- ...
  // -- sp[4 * (argc - 1)] : first argument
  // -- sp[4 * argc] : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ ShiftLeftImm(r11, r3, Operand(kPointerSizeLog2));
  __ LoadPX(r5, MemOperand(sp, r11));
  CompatibleReceiverCheck(masm, r5, r6, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ LoadP(r7, FieldMemOperand(r6, FunctionTemplateInfo::kCallCodeOffset));
  __ LoadP(r7, FieldMemOperand(r7, CallHandlerInfo::kFastHandlerOffset));
  __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver).
  __ addi(r11, r11, Operand(kPointerSize));
  __ add(sp, sp, r11);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Look up the function in the JavaScript frame.
  __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r3);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
  __ bne(&skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));

  {
    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start

    if (FLAG_enable_embedded_constant_pool) {
      __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ LoadP(r4, FieldMemOperand(
                     r4, FixedArray::OffsetOfElementAt(
                             DeoptimizationInputData::kOsrPcOffsetIndex)));
    __ SmiUntag(r4);

    // Compute the target address = code start + osr_offset
    __ add(r0, r3, r4);

    // And "return" to the OSR entry point of the function.
    __ mtlr(r0);
    __ blr();
  }
}
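
// The resumption address is computed from the loads above as
//   entry = code_start + deopt_data[kOsrPcOffsetIndex]
// and is entered via mtlr/blr, i.e. by "returning" into the freshly compiled
// optimized code rather than calling it.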


// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  // -- r3 : number of arguments
  // -- r4 : function
  // -- cp : context
  // -- lr : return address
  // -- sp[0] : receiver
  // -----------------------------------

  // 1. Pop receiver into r3 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(r3);
    __ JumpIfSmi(r3, &receiver_not_date);
    __ CompareObjectType(r3, r5, r6, JS_DATE_TYPE);
    __ bne(&receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    __ LoadP(r3, FieldMemOperand(r3, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      Label stamp_mismatch;
      __ mov(r4, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
      __ LoadP(r4, MemOperand(r4));
      __ LoadP(ip, FieldMemOperand(r3, JSDate::kCacheStampOffset));
      __ cmp(r4, ip);
      __ bne(&stamp_mismatch);
      __ LoadP(r3, FieldMemOperand(
                       r3, JSDate::kValueOffset + field_index * kPointerSize));
      __ Ret();
      __ bind(&stamp_mismatch);
    }
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, r4);
    __ LoadSmiLiteral(r4, Smi::FromInt(field_index));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ push(r3);
    __ PushStandardFrame(r4);
    __ LoadSmiLiteral(r7, Smi::FromInt(0));
    __ push(r7);
    __ CallRuntime(Runtime::kThrowNotDateError);
  }
}
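
// Cached fields (field_index < JSDate::kFirstUncachedField) are read straight
// out of the JSDate object as long as the isolate-wide date_cache_stamp still
// matches the stamp recorded in the object; on a mismatch (or for uncached
// fields) the value is recomputed through the C helper behind
// ExternalReference::get_date_field_function, called with the date object and
// the smi-tagged field index.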

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : argc
  // -- sp[0] : argArray
  // -- sp[4] : thisArg
  // -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into r4, argArray into r3 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ mr(scratch, r3);
    __ LoadP(r4, MemOperand(new_sp, 0));  // receiver
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize));  // thisArg
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize));  // argArray
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  // -- r3 : argArray
  // -- r4 : receiver
  // -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(r4, &receiver_not_callable);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&receiver_not_callable, cr0);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(r3, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ li(r3, Operand::Zero());
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
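
// JS-level behavior implemented above (illustrative sketch only, not the
// exact spec steps):
//   Function.prototype.apply = function (thisArg, argArray) {
//     if (!IsCallable(this)) throw new TypeError();
//     if (argArray === null || argArray === undefined) {
//       return Call(this, thisArg);
//     }
//     return Call(this, thisArg, ...CreateListFromArrayLike(argArray));
//   };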


// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r3: actual number of arguments
  {
    Label done;
    __ cmpi(r3, Operand::Zero());
    __ bne(&done);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ addi(r3, r3, Operand(1));
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  // r3: actual number of arguments
  __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
  __ LoadPX(r4, MemOperand(sp, r5));

  // 3. Shift arguments and return address one slot down on the stack
  // (overwriting the original receiver). Adjust argument count to make
  // the original first argument the new receiver.
  // r3: actual number of arguments
  // r4: callable
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r5, sp, r5);

    __ mtctr(r3);
    __ bind(&loop);
    __ LoadP(ip, MemOperand(r5, -kPointerSize));
    __ StoreP(ip, MemOperand(r5));
    __ subi(r5, r5, Operand(kPointerSize));
    __ bdnz(&loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ subi(r3, r3, Operand(1));
    __ pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : argc
  // -- sp[0] : argumentsList
  // -- sp[4] : thisArgument
  // -- sp[8] : target
  // -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r4 (if present), argumentsList into r3 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mr(scratch, r4);
    __ mr(r3, r4);
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ beq(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize));  // thisArgument
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 3 * -kPointerSize));  // argumentsList
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  // -- r3 : argumentsList
  // -- r4 : target
  // -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(r4, &target_not_callable);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&target_not_callable, cr0);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : argc
  // -- sp[0] : new.target (optional)
  // -- sp[4] : argumentsList
  // -- sp[8] : target
  // -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r4 (if present), argumentsList into r3 (if present),
  // new.target into r6 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mr(r3, r4);
    __ mr(r6, r4);
    __ StoreP(r4, MemOperand(new_sp, 0));  // receiver (undefined)
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ mr(r6, r4);  // new.target defaults to target
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize));  // argumentsList
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r6, MemOperand(new_sp, 3 * -kPointerSize));  // new.target
    __ bind(&skip);
    __ mr(sp, new_sp);
  }

  // ----------- S t a t e -------------
  // -- r3 : argumentsList
  // -- r6 : new.target
  // -- r4 : target
  // -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(r4, &target_not_constructor);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsConstructor, r0);
  __ beq(&target_not_constructor, cr0);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(r6, &new_target_not_constructor);
  __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsConstructor, r0);
  __ beq(&new_target_not_constructor, cr0);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ StoreP(r6, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
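
// The stack slots consumed above map directly onto the JS-level signature of
// Reflect.construct(target, argumentsList [, newTarget]): sp[8] is target,
// sp[4] is argumentsList, and sp[0] is newTarget when three arguments were
// passed; with fewer arguments, newTarget defaults to target.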


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  // -- r3 : actual number of arguments
  // -- r4 : function (passed through to callee)
  // -- r5 : expected number of arguments
  // -- r6 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
  // Make r8 the space we have left. The stack might already be overflowed
  // here which will cause r8 to become negative.
  __ sub(r8, sp, r8);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
  __ cmp(r8, r0);
  __ ble(stack_overflow);  // Signed comparison.
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r3);
  __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ mflr(r0);
  __ push(r0);
  if (FLAG_enable_embedded_constant_pool) {
    __ Push(fp, kConstantPoolRegister, r7, r4, r3);
  } else {
    __ Push(fp, r7, r4, r3);
  }
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                          kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                                kPointerSize)));
  int stack_adjustment = kPointerSize;  // adjust for receiver
  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
  __ SmiToPtrArrayOffset(r0, r4);
  __ add(sp, sp, r0);
}


// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : argumentsList
  // -- r4 : target
  // -- r6 : new.target (checked to be constructor or undefined)
  // -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(r3, &create_runtime);

    // Load the map of argumentsList into r5.
    __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));

    // Load native context into r7.
    __ LoadP(r7, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ LoadP(ip, ContextMemOperand(r7, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r5);
    __ beq(&create_arguments);
    __ LoadP(ip, ContextMemOperand(r7, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r5);
    __ beq(&create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(r5, ip, JS_ARRAY_TYPE);
    __ beq(&create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r4, r6, r3);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(r4, r6);
      __ LoadP(r5, FieldMemOperand(r3, FixedArray::kLengthOffset));
      __ SmiUntag(r5);
    }
    __ b(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ LoadP(r5, FieldMemOperand(r3, JSArgumentsObject::kLengthOffset));
    __ LoadP(r7, FieldMemOperand(r3, JSObject::kElementsOffset));
    __ LoadP(ip, FieldMemOperand(r7, FixedArray::kLengthOffset));
    __ cmp(r5, ip);
    __ bne(&create_runtime);
    __ SmiUntag(r5);
    __ mr(r3, r7);
    __ b(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ lbz(r5, FieldMemOperand(r5, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(r5);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ cmpi(r5, Operand(FAST_ELEMENTS));
    __ bgt(&create_runtime);
    __ cmpi(r5, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ beq(&create_runtime);
    __ LoadP(r5, FieldMemOperand(r3, JSArray::kLengthOffset));
    __ LoadP(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
    __ SmiUntag(r5);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
    // Make ip the space we have left. The stack might already be overflowed
    // here which will cause ip to become negative.
    __ sub(ip, sp, ip);
    // Check if the arguments will overflow the stack.
    __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
    __ cmp(ip, r0);  // Signed comparison.
    __ bgt(&done);
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  // -- r4 : target
  // -- r3 : args (a FixedArray built from argumentsList)
  // -- r5 : len (number of elements to push from args)
  // -- r6 : new.target (checked to be constructor or undefined)
  // -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label loop, no_args;
    __ cmpi(r5, Operand::Zero());
    __ beq(&no_args);
    __ addi(r3, r3,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    __ mtctr(r5);
    __ bind(&loop);
    __ LoadPU(r0, MemOperand(r3, kPointerSize));
    __ push(r0);
    __ bdnz(&loop);
    __ bind(&no_args);
    __ mr(r3, r5);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(r6, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
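
// The push loop above uses LoadPU (load with update) so the base register is
// pre-incremented on each iteration: elements are read in ascending
// FixedArray order and pushed so that args[len - 1] ends up at sp[0].
// Afterwards r3 is reused to carry len as the outgoing argument count.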

namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
  __ lbz(scratch1, MemOperand(scratch1));
  __ cmpi(scratch1, Operand::Zero());
  __ beq(&done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ LoadP(scratch3,
             MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::STUB), r0);
    __ bne(&no_interpreter_frame);
    __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(
      scratch3,
      MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
  __ bne(&no_arguments_adaptor);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mr(fp, scratch2);
  __ LoadP(caller_args_count_reg,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ b(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count.
  __ LoadP(scratch1,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  __ LoadP(scratch1,
           FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ LoadWordArith(
      caller_args_count_reg,
      FieldMemOperand(scratch1,
                      SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  __ SmiUntag(caller_args_count_reg);
#endif

  __ bind(&formal_parameter_count_loaded);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace
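
// After PrepareForTailCall the current frame (and any arguments adaptor frame
// beneath it) has been torn down and the already-prepared arguments have been
// slid down accordingly, so the subsequent jump to the callee behaves as if
// g()'s caller had invoked f() directly.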

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(r4);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
  __ TestBitMask(r6, SharedFunctionInfo::kClassConstructorBits, r0);
  __ bne(&class_constructor, cr0);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ andi(r0, r6, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
                          (1 << SharedFunctionInfo::kNativeBit)));
  __ bne(&done_convert, cr0);
  {
    // ----------- S t a t e -------------
    // -- r3 : the number of arguments (not including the receiver)
    // -- r4 : the function to call (checked to be a JSFunction)
    // -- r5 : the shared function info.
    // -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r6);
    } else {
      Label convert_to_object, convert_receiver;
      __ ShiftLeftImm(r6, r3, Operand(kPointerSizeLog2));
      __ LoadPX(r6, MemOperand(sp, r6));
      __ JumpIfSmi(r6, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE);
      __ bge(&done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(r6, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(r6, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(r6);
        }
        __ b(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(r3);
        __ Push(r3, r4);
        __ mr(r3, r6);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ mr(r6, r3);
        __ Pop(r3, r4);
        __ SmiUntag(r3);
      }
      __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
    __ StorePX(r6, MemOperand(sp, r7));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : the function to call (checked to be a JSFunction)
  // -- r5 : the shared function info.
  // -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  __ LoadWordArith(
      r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  __ SmiUntag(r5);
#endif
  ParameterCount actual(r3);
  ParameterCount expected(r5);
  __ InvokeFunctionCode(r4, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
    __ push(r4);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
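
// The receiver conversion above follows the ES6 OrdinaryCallBindThis rules
// (illustrative summary only): for sloppy-mode, non-native callees,
//   null/undefined     -> the global proxy,
//   other primitives   -> ToObject(receiver),
//   JSReceivers        -> used as-is;
// strict-mode and native functions take the receiver unchanged.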


namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : target (checked to be a JSBoundFunction)
  // -- r6 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into r5 and length of that into r7.
  Label no_bound_arguments;
  __ LoadP(r5, FieldMemOperand(r4, JSBoundFunction::kBoundArgumentsOffset));
  __ LoadP(r7, FieldMemOperand(r5, FixedArray::kLengthOffset));
  __ SmiUntag(r7, SetRC);
  __ beq(&no_bound_arguments, cr0);
  {
    // ----------- S t a t e -------------
    // -- r3 : the number of arguments (not including the receiver)
    // -- r4 : target (checked to be a JSBoundFunction)
    // -- r5 : the [[BoundArguments]] (implemented as FixedArray)
    // -- r6 : new.target (only in case of [[Construct]])
    // -- r7 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ mr(r9, sp);  // preserve previous stack pointer
      __ ShiftLeftImm(r10, r7, Operand(kPointerSizeLog2));
      __ sub(sp, sp, r10);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
      __ bgt(&done);  // Signed comparison.
      // Restore the stack pointer.
      __ mr(sp, r9);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Relocate arguments down the stack.
    // -- r3 : the number of arguments (not including the receiver)
    // -- r9 : the previous stack pointer
    // -- r10: the size of the [[BoundArguments]]
    {
      Label skip, loop;
      __ li(r8, Operand::Zero());
      __ cmpi(r3, Operand::Zero());
      __ beq(&skip);
      __ mtctr(r3);
      __ bind(&loop);
      __ LoadPX(r0, MemOperand(r9, r8));
      __ StorePX(r0, MemOperand(sp, r8));
      __ addi(r8, r8, Operand(kPointerSize));
      __ bdnz(&loop);
      __ bind(&skip);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ add(r5, r5, r10);
      __ mtctr(r7);
      __ bind(&loop);
      __ LoadPU(r0, MemOperand(r5, -kPointerSize));
      __ StorePX(r0, MemOperand(sp, r8));
      __ addi(r8, r8, Operand(kPointerSize));
      __ bdnz(&loop);
      __ add(r3, r3, r7);
    }
  }
  __ bind(&no_bound_arguments);
}

}  // namespace
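
// Net effect of Generate_PushBoundArguments (illustrative sketch only): for a
// bound function with n bound arguments, the stack goes from
//   [receiver, arg1..argN]   to   [receiver, bound1..boundn, arg1..argN]
// (listed from deepest slot to sp): the existing arguments are slid down by
// n slots, the [[BoundArguments]] fill the gap, and r3 grows by n.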


// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(r4);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  // Patch the receiver to [[BoundThis]].
  __ LoadP(ip, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset));
  __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
  __ StorePX(ip, MemOperand(sp, r0));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ LoadP(r4,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                       masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(r4, &non_callable);
  __ bind(&non_smi);
  __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&non_callable, cr0);

  __ cmpi(r8, Operand(JS_PROXY_TYPE));
  __ bne(&non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(r4);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ addi(r3, r3, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
  __ StorePX(r4, MemOperand(sp, r8));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r4);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}


// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : the constructor to call (checked to be a JSFunction)
  // -- r6 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(r4);

  // The calling convention for function-specific ConstructStubs requires
  // r5 to contain either an AllocationSite or undefined.
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset));
  __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : the function to call (checked to be a JSBoundFunction)
  // -- r6 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(r4);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  Label skip;
  __ cmp(r4, r6);
  __ bne(&skip);
  __ LoadP(r6,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ bind(&skip);

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ LoadP(r4,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : the constructor to call (checked to be a JSProxy)
  // -- r6 : the new target (either the same as the constructor or
  //         the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(r4, r6);
  // Include the pushed new_target, constructor and the receiver.
  __ addi(r3, r3, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}


// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : the number of arguments (not including the receiver)
  // -- r4 : the constructor to call (can be any Object)
  // -- r6 : the new target (either the same as the constructor or
  //         the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(r4, &non_constructor);

  // Dispatch based on instance type.
  __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  __ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r5, Map::kIsConstructor, r0);
  __ beq(&non_constructor, cr0);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ cmpi(r8, Operand(JS_PROXY_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
    __ StorePX(r4, MemOperand(sp, r8));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r4);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
2735
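// For illustration, Generate_Construct dispatches roughly as follows:
//
//   if target is a Smi                      -> throw (non-constructor)
//   else if target is a JSFunction          -> ConstructFunction
//   else if map bit kIsConstructor is clear -> throw (non-constructor)
//   else if target is a JSBoundFunction     -> ConstructBoundFunction
//   else if target is a JSProxy             -> ConstructProxy
//   else                                    -> CALL_AS_CONSTRUCTOR_DELEGATE
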
// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r4 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(r4);
  __ Push(r4);
  __ LoadSmiLiteral(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r4 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(r4);
  __ LoadSmiLiteral(r5, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(r4, r5);
  __ LoadSmiLiteral(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

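// For illustration, both Allocate builtins smi-tag the untagged byte count so
// it can be passed to the runtime as a tagged value (e.g. a request for 64
// bytes travels as Smi::FromInt(64)); the context register cp is loaded with
// Smi zero beforehand, a dummy non-heap value for the runtime call.
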
// static
void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
  // The StringToNumber stub takes one argument in r3.
  __ AssertString(r3);

  // Check if string has a cached array index.
  Label runtime;
  __ lwz(r5, FieldMemOperand(r3, String::kHashFieldOffset));
  __ And(r0, r5, Operand(String::kContainsCachedArrayIndexMask), SetRC);
  __ bne(&runtime, cr0);
  __ IndexFromHash(r5, r3);
  __ blr();

  __ bind(&runtime);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ push(r3);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kStringToNumber);
  }
  __ Ret();
}

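// For illustration, short array-index strings cache their numeric value in
// the string's hash field, so the fast path above can turn "42" into a Smi
// directly; strings like "3.14" fall through to Runtime::kStringToNumber.
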
// static
void Builtins::Generate_ToNumber(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in r3.
  STATIC_ASSERT(kSmiTag == 0);
  __ TestIfSmi(r3, r0);
  __ Ret(eq, cr0);

  __ CompareObjectType(r3, r4, r4, HEAP_NUMBER_TYPE);
  // r3: receiver
  // r4: receiver instance type
  __ Ret(eq);

  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
          RelocInfo::CODE_TARGET);
}

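// For illustration, ToNumber(5) returns via the Smi fast path and
// ToNumber(1.5) via the HeapNumber check; anything else (strings, oddballs,
// objects) is handled by NonNumberToNumber below.
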
// static
void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
  // The NonNumberToNumber stub takes one argument in r3.
  __ AssertNotNumber(r3);

  __ CompareObjectType(r3, r4, r4, FIRST_NONSTRING_TYPE);
  // r3: receiver
  // r4: receiver instance type
  __ Jump(masm->isolate()->builtins()->StringToNumber(), RelocInfo::CODE_TARGET,
          lt);

  Label not_oddball;
  __ cmpi(r4, Operand(ODDBALL_TYPE));
  __ bne(&not_oddball);
  __ LoadP(r3, FieldMemOperand(r3, Oddball::kToNumberOffset));
  __ blr();
  __ bind(&not_oddball);

  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ push(r3);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kToNumber);
  }
  __ Ret();
}

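// For illustration, oddballs carry a precomputed numeric value in their
// to_number slot, so the oddball fast path above resolves Number(true) -> 1,
// Number(null) -> 0 and Number(undefined) -> NaN without a runtime call.
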
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  //  -- r6 : new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  __ cmp(r3, r5);
  __ blt(&too_few);
  __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ beq(&dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into r3 and copy end address into r7.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);
    // Adjust for the return address and receiver.
    __ addi(r3, r3, Operand(2 * kPointerSize));
    __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
    __ sub(r7, r3, r7);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // r7: copy end address
    // ip: code entry to call

    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r3, 0));
    __ push(r0);
    __ cmp(r3, r7);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }

  {  // Too few parameters: actual < expected
    __ bind(&too_few);

    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate the copy start address into r3; the copy end address is fp.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
    __ push(r0);
    __ cmp(r3, fp);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
    __ sub(r7, fp, r7);
    // Adjust for frame.
    __ subi(r7, r7, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ cmp(sp, r7);
    __ bne(&fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mr(r3, r5);
  // r3 : expected number of arguments
  // r4 : function (passed through to callee)
  // r6 : new target (passed through to callee)
  __ CallJSEntry(ip);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ blr();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ JumpToJSEntry(ip);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}
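
// For illustration, the adaptor reconciles actual and expected argument
// counts, e.g. for
//
//   function f(a, b, c) { return [a, b, c]; }
//
//   f(1);           // too few: b and c are filled with undefined
//   f(1, 2, 3, 4);  // enough: all four values are copied to the new frame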

#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC