// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_PPC

#include "src/codegen.h"
#include "src/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
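
// Register conventions used throughout this file (mirroring the state
// comments below): r3 carries the argument count and the return value,
// r4 the target JSFunction, cp the context, ip the code entry, and lr
// the return address.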


void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments excluding receiver
  //  -- r4                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument (argc == r3)
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(r4);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects r3 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addi(r3, r3, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.

  __ LoadP(result,
           MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ LoadP(result, FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ LoadP(result,
           MemOperand(result, Context::SlotOffset(
                                  Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.

  __ LoadP(result,
           MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ LoadP(result, FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ LoadP(
      result,
      MemOperand(result, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, r5, r6);

  Register function = r4;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r5);
    __ cmp(function, r5);
    __ Assert(eq, kUnexpectedStringFunction);
  }

  // Load the first argument in r3 and get rid of the rest.
  Label no_arguments;
  __ cmpi(r3, Operand::Zero());
  __ beq(&no_arguments);
  // First argument is at sp[(argc - 1) * 4].
  __ subi(r3, r3, Operand(1));
  __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
  __ add(sp, sp, r3);
  __ LoadP(r3, MemOperand(sp));
  // sp now points to args[0]; drop args[0] and the receiver.
  __ Drop(2);

  Register argument = r5;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(r3,        // Input.
                             argument,  // Result.
                             r6,        // Scratch.
                             r7,        // Scratch.
                             r8,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r6, r7);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- r5     : argument converted to string
  //  -- r4     : constructor function
  //  -- lr     : return address
  // -----------------------------------

  Label gc_required;
  __ Allocate(JSValue::kSize,
              r3,  // Result.
              r6,  // Scratch.
              r7,  // Scratch.
              &gc_required, TAG_OBJECT);

  // Initialize the String object.
  Register map = r6;
  __ LoadGlobalFunctionInitialMap(function, map, r7);
  if (FLAG_debug_code) {
    __ lbz(r7, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ cmpi(r7, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
    __ lbz(r7, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ cmpi(r7, Operand::Zero());
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ StoreP(map, FieldMemOperand(r3, HeapObject::kMapOffset), r0);

  __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
  __ StoreP(r6, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
  __ StoreP(r6, FieldMemOperand(r3, JSObject::kElementsOffset), r0);

  __ StoreP(argument, FieldMemOperand(r3, JSValue::kValueOffset), r0);

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
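  // (The map, properties, elements and value slots written above account for
  // all four pointer-size fields of the JSValue.)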

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(r3, &convert_argument);

  // Is it a String?
  __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ lbz(r6, FieldMemOperand(r5, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ andi(r0, r6, Operand(kIsNotStringMask));
  __ bne(&convert_argument, cr0);
  __ mr(argument, r3);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7);
  __ b(&argument_is_string);

  // Invoke the conversion builtin and put the result into r5.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(r3);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mr(argument, r3);
  __ b(&argument_is_string);

  // Load the empty string into r5, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ b(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r6, r7);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}


static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(r4, r4);

  __ CallRuntime(function_id, 1);
  // Restore function.
  __ Pop(r4);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ addi(ip, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmpl(sp, ip);
  __ bge(&ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- r4     : constructor function
  //  -- r5     : allocation site or undefined
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never create mementos for api functions.
  DCHECK(!is_api_function || !create_memento);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(r5, r6);
      __ push(r5);
    }

    // Preserve the two incoming parameters on the stack.
    __ SmiTag(r3);
    __ push(r3);  // Smi-tagged arguments count.
    __ push(r4);  // Constructor function.

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ mov(r5, Operand(debug_step_in_fp));
      __ LoadP(r5, MemOperand(r5));
      __ cmpi(r5, Operand::Zero());
      __ bne(&rt_call);

      // Load the initial map and verify that it is in fact a map.
      // r4: constructor function
      __ LoadP(r5,
               FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(r5, &rt_call);
      __ CompareObjectType(r5, r6, r7, MAP_TYPE);
      __ bne(&rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc), in which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // r4: constructor function
      // r5: initial map
      __ CompareInstanceType(r5, r6, JS_FUNCTION_TYPE);
      __ beq(&rt_call);

      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 = FieldMemOperand(r5, Map::kBitField3Offset);
        // Check if slack tracking is enabled.
        __ lwz(r7, bit_field3);
        __ DecodeField<Map::ConstructionCount>(r11, r7);
        STATIC_ASSERT(JSFunction::kNoSlackTracking == 0);
        __ cmpi(r11, Operand::Zero());  // JSFunction::kNoSlackTracking
        __ beq(&allocate);
        // Decrease generous allocation count.
        __ Add(r7, r7, -(1 << Map::ConstructionCount::kShift), r0);
        __ stw(r7, bit_field3);
        __ cmpi(r11, Operand(JSFunction::kFinishSlackTracking));
        __ bne(&allocate);

        __ push(r4);

        __ Push(r5, r4);  // r4 = constructor
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ Pop(r4, r5);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // r4: constructor function
      // r5: initial map
      __ lbz(r6, FieldMemOperand(r5, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ addi(r6, r6, Operand(AllocationMemento::kSize / kPointerSize));
      }

      __ Allocate(r6, r7, r8, r9, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // r4: constructor function
      // r5: initial map
      // r6: object size (not including memento if create_memento)
      // r7: JSObject (not tagged)
      __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
      __ mr(r8, r7);
      __ StoreP(r5, MemOperand(r8, JSObject::kMapOffset));
      __ StoreP(r9, MemOperand(r8, JSObject::kPropertiesOffset));
      __ StoreP(r9, MemOperand(r8, JSObject::kElementsOffset));
      __ addi(r8, r8, Operand(JSObject::kElementsOffset + kPointerSize));

      __ ShiftLeftImm(r9, r6, Operand(kPointerSizeLog2));
      __ add(r9, r7, r9);  // End of object.

      // Fill all the in-object properties with the appropriate filler.
      // r4: constructor function
      // r5: initial map
      // r6: object size (in words, including memento if create_memento)
      // r7: JSObject (not tagged)
      // r8: First in-object property of JSObject (not tagged)
      // r9: End of object
      DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
      __ LoadRoot(r10, Heap::kUndefinedValueRootIndex);

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        STATIC_ASSERT(JSFunction::kNoSlackTracking == 0);
        __ cmpi(r11, Operand::Zero());  // JSFunction::kNoSlackTracking
        __ beq(&no_inobject_slack_tracking);

        // Allocate object with a slack.
        __ lbz(r3, FieldMemOperand(r5, Map::kPreAllocatedPropertyFieldsOffset));
        if (FLAG_debug_code) {
          __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
          __ add(r0, r8, r0);
          // r0: offset of first field after pre-allocated fields
          __ cmp(r0, r9);
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        {
          Label done;
          __ cmpi(r3, Operand::Zero());
          __ beq(&done);
          __ InitializeNFieldsWithFiller(r8, r3, r10);
          __ bind(&done);
        }
        // To allow for truncation.
        __ LoadRoot(r10, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with one pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }

      if (create_memento) {
        __ subi(r3, r9, Operand(AllocationMemento::kSize));
        __ InitializeFieldsWithFiller(r8, r3, r10);

        // Fill in memento fields.
        // r8: points to the allocated but uninitialized memento.
        __ LoadRoot(r10, Heap::kAllocationMementoMapRootIndex);
        __ StoreP(r10, MemOperand(r8, AllocationMemento::kMapOffset));
        // Load the AllocationSite.
        __ LoadP(r10, MemOperand(sp, 2 * kPointerSize));
        __ StoreP(r10,
                  MemOperand(r8, AllocationMemento::kAllocationSiteOffset));
        __ addi(r8, r8, Operand(AllocationMemento::kAllocationSiteOffset +
                                kPointerSize));
      } else {
        __ InitializeFieldsWithFiller(r8, r9, r10);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ addi(r7, r7, Operand(kHeapObjectTag));

      // Check if a non-empty properties array is needed. Continue with the
      // allocated object if not; fall through to the runtime call if it is.
      // r4: constructor function
      // r7: JSObject
      // r8: start of next object (not tagged)
      __ lbz(r6, FieldMemOperand(r5, Map::kUnusedPropertyFieldsOffset));
      // The instance-size fields contain both pre-allocated property fields
      // and in-object properties.
      __ lbz(r0, FieldMemOperand(r5, Map::kPreAllocatedPropertyFieldsOffset));
      __ add(r6, r6, r0);
      __ lbz(r0, FieldMemOperand(r5, Map::kInObjectPropertiesOffset));
      __ sub(r6, r6, r0, LeaveOE, SetRC);

      // Done if no extra properties are to be allocated.
      __ beq(&allocated, cr0);
      __ Assert(ge, kPropertyAllocationCountFailed, cr0);

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // r4: constructor
      // r6: number of elements in properties array
      // r7: JSObject
      // r8: start of next object
      __ addi(r3, r6, Operand(FixedArray::kHeaderSize / kPointerSize));
      __ Allocate(
          r3, r8, r9, r5, &undo_allocation,
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

      // Initialize the FixedArray.
      // r4: constructor
      // r6: number of elements in properties array
      // r7: JSObject
      // r8: FixedArray (not tagged)
      __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex);
      __ mr(r5, r8);
      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      __ StoreP(r9, MemOperand(r5));
      DCHECK_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
      __ SmiTag(r3, r6);
      __ StoreP(r3, MemOperand(r5, kPointerSize));
      __ addi(r5, r5, Operand(2 * kPointerSize));

      // Initialize the fields to undefined.
      // r4: constructor function
      // r5: First element of FixedArray (not tagged)
      // r6: number of elements in properties array
      // r7: JSObject
      // r8: FixedArray (not tagged)
      DCHECK_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
      {
        Label done;
        __ cmpi(r6, Operand::Zero());
        __ beq(&done);
        if (!is_api_function || create_memento) {
          __ LoadRoot(r10, Heap::kUndefinedValueRootIndex);
        } else if (FLAG_debug_code) {
          __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
          __ cmp(r10, r11);
          __ Assert(eq, kUndefinedValueNotLoaded);
        }
        __ InitializeNFieldsWithFiller(r5, r6, r10);
        __ bind(&done);
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // r4: constructor function
      // r7: JSObject
      // r8: FixedArray (not tagged)
      __ addi(r8, r8, Operand(kHeapObjectTag));  // Add the heap tag.
      __ StoreP(r8, FieldMemOperand(r7, JSObject::kPropertiesOffset), r0);

      // Continue with JSObject being successfully allocated.
      // r4: constructor function
      // r7: JSObject
      __ b(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated object's unused properties.
      // r7: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(r7, r8);
    }

    // Allocate the new receiver object using the runtime call.
    // r4: constructor function
    __ bind(&rt_call);
    if (create_memento) {
      // Get the cell or allocation site.
      __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
      __ push(r5);
    }

    __ push(r4);  // argument for Runtime_NewObject
    if (create_memento) {
      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
    } else {
      __ CallRuntime(Runtime::kNewObject, 1);
    }
    __ mr(r7, r3);

    // If we ended up using the runtime, and we want a memento, then the
    // runtime call made it for us, and we shouldn't increment the create
    // count.
    Label count_incremented;
    if (create_memento) {
      __ b(&count_incremented);
    }

    // Receiver for constructor call allocated.
    // r7: JSObject
    __ bind(&allocated);

    if (create_memento) {
      __ LoadP(r5, MemOperand(sp, kPointerSize * 2));
      __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
      __ cmp(r5, r8);
      __ beq(&count_incremented);
      // r5 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ LoadP(
          r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset));
      __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0);
      __ StoreP(
          r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset),
          r0);
      __ bind(&count_incremented);
    }

    __ Push(r7, r7);

    // Reload the number of arguments and the constructor from the stack.
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));
    __ LoadP(r6, MemOperand(sp, 3 * kPointerSize));

    // Set up pointer to last argument.
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below.
    __ SmiUntag(r3, r6);

    // Copy arguments and receiver to the expression stack.
    // r3: number of arguments
    // r4: constructor function
    // r5: address of last argument (caller sp)
    // r6: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_args);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ mtctr(r3);
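    // (mtctr seeds the PPC count register; the bdnz below decrements it and
    // branches while it is non-zero, so the arguments are pushed from last
    // to first.)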
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r5, ip));
    __ push(r0);
    __ bdnz(&loop);
    __ bind(&no_args);

    // Call the function.
    // r3: number of arguments
    // r4: constructor function
    if (is_api_function) {
      __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
      Handle<Code> code = masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(r3);
      __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r3: result
    // sp[0]: receiver
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // r3: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(r3, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ CompareObjectType(r3, r4, r6, FIRST_SPEC_OBJECT_TYPE);
    __ bge(&exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ LoadP(r3, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // r3: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5);
  __ blr();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r3: code entry
  // r4: function
  // r5: receiver
  // r6: argc
  // r7: argv
  // r0, r8-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ li(cp, Operand::Zero());

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ push(r4);
    __ push(r5);

    // Copy arguments to the stack in a loop.
    // r4: function
    // r6: argc
    // r7: argv, i.e. points to first arg
    Label loop, entry;
    __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
    __ add(r5, r7, r0);
    // r5 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ LoadP(r8, MemOperand(r7));  // read next parameter
    __ addi(r7, r7, Operand(kPointerSize));
    __ LoadP(r0, MemOperand(r8));  // dereference handle
    __ push(r0);  // push parameter
    __ bind(&entry);
    __ cmp(r7, r5);
    __ bne(&loop);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
    __ mr(r14, r7);
    __ mr(r15, r7);
    __ mr(r16, r7);
    __ mr(r17, r7);

    // Invoke the code and pass argc as r3.
    __ mr(r3, r6);
    if (is_construct) {
      // No type feedback cell is available.
      __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(r3);
      __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
    }
    // Exit the JS frame, remove the parameters (except the function), and
    // return.
  }
  __ blr();

  // r3: result
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(r4, r4);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore function.
  __ pop(r4);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);
  __ Jump(ip);
}
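
// The builtins generated from this helper are entered from the patched
// prologue of aged code; after resetting the age they resume execution at
// the start of the (now young) code-age sequence.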

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);

  // Perform prologue operations usually performed by the young code stub.
  __ PushFixedFrame(r4);
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength));
  __ Jump(r3);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification; this is important for compiled
    // stubs that tail call the runtime on deopts, passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Notify the runtime system of the stub failure.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ addi(sp, sp, Operand(kPointerSize));  // Ignore state.
  __ blr();  // Jump to miss handler.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
    __ push(r3);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> r9.
  __ LoadP(r9, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r9);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmpi(r9, Operand(FullCodeGenerator::NO_REGISTERS));
  __ bne(&with_tos_register);
  __ addi(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
  __ cmpi(r9, Operand(FullCodeGenerator::TOS_REG));
  __ bne(&unknown_state);
  __ addi(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r3);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
  __ bne(&skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));

#if V8_OOL_CONSTANT_POOL
  {
    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ LoadP(kConstantPoolRegister,
             FieldMemOperand(r3, Code::kConstantPoolOffset));
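    // (With an out-of-line constant pool, the constant pool pointer must be
    // reloaded from the target code object before its constants can be
    // addressed.)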
#endif

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ LoadP(r4, FieldMemOperand(
                     r4, FixedArray::OffsetOfElementAt(
                             DeoptimizationInputData::kOsrPcOffsetIndex)));
    __ SmiUntag(r4);

    // Compute the target address = code_obj + header_size + osr_offset
    // <entry_addr> = <code_obj> + #header_size + <osr_offset>
    __ add(r3, r3, r4);
    __ addi(r0, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ mtlr(r0);

    // And "return" to the OSR entry point of the function.
    __ Ret();
#if V8_OOL_CONSTANT_POOL
  }
#endif
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as an indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmpl(sp, ip);
  __ bge(&ok);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r3: actual number of arguments
  {
    Label done;
    __ cmpi(r3, Operand::Zero());
    __ bne(&done);
    __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
    __ push(r5);
    __ addi(r3, r3, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // r3: actual number of arguments
  Label slow, non_function;
  __ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2));
  __ add(r4, sp, r4);
  __ LoadP(r4, MemOperand(r4));
  __ JumpIfSmi(r4, &non_function);
  __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
  __ bne(&slow);

  // 3a. Patch the first argument if necessary when calling a function.
  // r3: actual number of arguments
  // r4: function
  Label shift_arguments;
  __ li(r7, Operand::Zero());  // indicate regular JS_FUNCTION
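  // r7 encodes the call type throughout: 0 = regular JS function,
  // 1 = function proxy, 2 = non-function (see the register comments below).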
  {
    Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestBit(r6,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kStrictModeFunction,
#else
               SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
#endif
               r0);
    __ bne(&shift_arguments, cr0);

    // Do not transform the receiver for natives (compiler hints already in
    // r6).
    __ TestBit(r6,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kNative,
#else
               SharedFunctionInfo::kNative + kSmiTagSize,
#endif
               r0);
    __ bne(&shift_arguments, cr0);

    // Compute the receiver in sloppy mode.
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ add(r5, sp, ip);
    __ LoadP(r5, MemOperand(r5, -kPointerSize));
    // r3: actual number of arguments
    // r4: function
    // r5: first argument
    __ JumpIfSmi(r5, &convert_to_object);

    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ cmp(r5, r6);
    __ beq(&use_global_proxy);
    __ LoadRoot(r6, Heap::kNullValueRootIndex);
    __ cmp(r5, r6);
    __ beq(&use_global_proxy);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r5, r6, r6, FIRST_SPEC_OBJECT_TYPE);
    __ bge(&shift_arguments);

    __ bind(&convert_to_object);

    {
      // Enter an internal frame in order to preserve argument count.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r3);
      __ Push(r3, r5);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mr(r5, r3);

      __ pop(r3);
      __ SmiUntag(r3);

      // Exit the internal frame.
    }

    // Restore the function to r4, and the flag to r7.
    __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
    __ add(r7, sp, r7);
    __ LoadP(r4, MemOperand(r7));
    __ li(r7, Operand::Zero());
    __ b(&patch_receiver);

    __ bind(&use_global_proxy);
    __ LoadP(r5, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));

    __ bind(&patch_receiver);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ add(r6, sp, ip);
    __ StoreP(r5, MemOperand(r6, -kPointerSize));

    __ b(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ li(r7, Operand(1, RelocInfo::NONE32));  // indicate function proxy
  __ cmpi(r5, Operand(JS_FUNCTION_PROXY_TYPE));
  __ beq(&shift_arguments);
  __ bind(&non_function);
  __ li(r7, Operand(2, RelocInfo::NONE32));  // indicate non-function

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // r3: actual number of arguments
  // r4: function
  // r7: call type (0: JS function, 1: function proxy, 2: non-function)
  __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
  __ add(r5, sp, ip);
  __ StoreP(r4, MemOperand(r5, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // r3: actual number of arguments
  // r4: function
  // r7: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ add(r5, sp, ip);

    __ bind(&loop);
    __ LoadP(ip, MemOperand(r5, -kPointerSize));
    __ StoreP(ip, MemOperand(r5));
    __ subi(r5, r5, Operand(kPointerSize));
    __ cmp(r5, sp);
    __ bne(&loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ subi(r3, r3, Operand(1));
    __ pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // r3: actual number of arguments
  // r4: function
  // r7: call type (0: JS function, 1: function proxy, 2: non-function)
  {
    Label function, non_proxy;
    __ cmpi(r7, Operand::Zero());
    __ beq(&function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ li(r5, Operand::Zero());
    __ cmpi(r7, Operand(1));
    __ bne(&non_proxy);

    __ push(r4);  // re-add proxy object as additional argument
    __ addi(r3, r3, Operand(1));
    __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(r4, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register ip without checking arguments.
  // r3: actual number of arguments
  // r4: function
  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadWordArith(
      r5, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  __ SmiUntag(r5);
#endif
  __ cmp(r5, r3);  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne);

  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(ip, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kArgsOffset = 2 * kPointerSize;
  const int kRecvOffset = 3 * kPointerSize;
  const int kFunctionOffset = 4 * kPointerSize;
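  // (kArgsOffset, kRecvOffset and kFunctionOffset address apply's incoming
  // arguments above fp; kIndexOffset and kLimitOffset name the two
  // expression-stack slots pushed below fp for the copy loop.)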

  {
    FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);

    __ LoadP(r3, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(r3);
    __ LoadP(r3, MemOperand(fp, kArgsOffset));  // get the args array
    __ push(r3);
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
    // Make r5 the space we have left. The stack might already be overflowed
    // here which will cause r5 to become negative.
    __ sub(r5, sp, r5);
    // Check if the arguments will overflow the stack.
    __ SmiToPtrArrayOffset(r0, r3);
    __ cmp(r5, r0);
    __ bgt(&okay);  // Signed comparison.

    // Out of stack space.
    __ LoadP(r4, MemOperand(fp, kFunctionOffset));
    __ Push(r4, r3);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ li(r4, Operand::Zero());
    __ Push(r3, r4);  // limit and initial index.

    // Get the receiver.
    __ LoadP(r3, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ LoadP(r4, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
    __ bne(&push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in r4.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_proxy;
    __ lwz(r5, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestBit(r5,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kStrictModeFunction,
#else
               SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
#endif
               r0);
    __ bne(&push_receiver, cr0);

    // Do not transform the receiver for natives (compiler hints already in
    // r5).
    __ TestBit(r5,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kNative,
#else
               SharedFunctionInfo::kNative + kSmiTagSize,
#endif
               r0);
    __ bne(&push_receiver, cr0);

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(r3, &call_to_object);
    __ LoadRoot(r4, Heap::kNullValueRootIndex);
    __ cmp(r3, r4);
    __ beq(&use_global_proxy);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ cmp(r3, r4);
    __ beq(&use_global_proxy);

    // Check if the receiver is already a JavaScript object.
    // r3: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
    __ bge(&push_receiver);

    // Convert the receiver to a regular object.
    // r3: receiver
    __ bind(&call_to_object);
    __ push(r3);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ b(&push_receiver);

    __ bind(&use_global_proxy);
    __ LoadP(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ LoadP(r3, FieldMemOperand(r3, GlobalObject::kGlobalProxyOffset));

    // Push the receiver.
    // r3: receiver
    __ bind(&push_receiver);
    __ push(r3);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ LoadP(r3, MemOperand(fp, kIndexOffset));
    __ b(&entry);

    // Load the current argument from the arguments array and push it to the
    // stack.
    // r3: current argument index
    __ bind(&loop);
    __ LoadP(r4, MemOperand(fp, kArgsOffset));
    __ Push(r4, r3);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ push(r3);

    // Advance the index stored on the frame.
    __ LoadP(r3, MemOperand(fp, kIndexOffset));
    __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
    __ StoreP(r3, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ bind(&entry);
    __ LoadP(r4, MemOperand(fp, kLimitOffset));
    __ cmp(r3, r4);
    __ bne(&loop);

    // Call the function.
    Label call_proxy;
    ParameterCount actual(r3);
    __ SmiUntag(r3);
    __ LoadP(r4, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
    __ bne(&call_proxy);
    __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());

    __ LeaveFrame(StackFrame::INTERNAL, 3 * kPointerSize);
    __ blr();

    // Call the function proxy.
    __ bind(&call_proxy);
    __ push(r4);  // add function proxy as last argument
    __ addi(r3, r3, Operand(1));
    __ li(r5, Operand::Zero());
    __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Tear down the internal frame and remove function, receiver and args.
  }
  __ addi(sp, sp, Operand(3 * kPointerSize));
  __ blr();
}


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
  // Make r8 the space we have left. The stack might already be overflowed
  // here which will cause r8 to become negative.
  __ sub(r8, sp, r8);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
  __ cmp(r8, r0);
  __ ble(stack_overflow);  // Signed comparison.
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
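  // The adaptor frame is built by pushing the caller's lr and fp, then
  // (with an out-of-line constant pool) the constant pool pointer, the
  // ARGUMENTS_ADAPTOR frame-type marker, the function, and the smi-tagged
  // argument count.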
  __ SmiTag(r3);
  __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ mflr(r0);
  __ push(r0);
#if V8_OOL_CONSTANT_POOL
  __ Push(fp, kConstantPoolRegister, r7, r4, r3);
#else
  __ Push(fp, r7, r4, r3);
#endif
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                          kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                                kPointerSize)));
  int stack_adjustment = kPointerSize;  // adjust for receiver
  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
  __ SmiToPtrArrayOffset(r0, r4);
  __ add(sp, sp, r0);
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  __ cmp(r3, r5);
  __ blt(&too_few);
  __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ beq(&dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r3 and copy end address into r5.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);
    // adjust for return address and receiver
    __ addi(r3, r3, Operand(2 * kPointerSize));
    __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2));
    __ sub(r5, r3, r5);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: copy end address
    // ip: code entry to call

    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r3, 0));
    __ push(r0);
    __ cmp(r3, r5);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }

  {  // Too few parameters: actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate the copy start address into r3; the copy end address is fp.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // ip: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
    __ push(r0);
    __ cmp(r3, fp);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    // r4: function
    // r5: expected number of arguments
    // ip: code entry to call
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2));
    __ sub(r5, fp, r5);
    // Adjust for frame.
    __ subi(r5, r5, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ cmp(sp, r5);
    __ bne(&fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ CallJSEntry(ip);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ blr();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ JumpToJSEntry(ip);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ bkpt(0);
  }
}


#undef __
}
}  // namespace v8::internal

#endif  // V8_TARGET_ARCH_PPC