1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5
6
7#include "src/v8.h"
8
9#if V8_TARGET_ARCH_MIPS64
10
11#include "src/codegen.h"
12#include "src/debug.h"
13#include "src/deoptimizer.h"
14#include "src/full-codegen.h"
15#include "src/runtime/runtime.h"
16
17namespace v8 {
18namespace internal {
19
20
21#define __ ACCESS_MASM(masm)
22
23
24void Builtins::Generate_Adaptor(MacroAssembler* masm,
25 CFunctionId id,
26 BuiltinExtraArguments extra_args) {
27 // ----------- S t a t e -------------
28 // -- a0 : number of arguments excluding receiver
29 // -- a1 : called function (only guaranteed when
30 // -- extra_args requires it)
31 // -- cp : context
32 // -- sp[0] : last argument
33 // -- ...
34 // -- sp[8 * (argc - 1)] : first argument
35  //  -- sp[8 * argc] : receiver
36 // -----------------------------------
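  // The adaptor forwards a JS builtin call to its C++ implementation: it
  // optionally pushes the called function as an extra argument, fixes up a0 so
  // the runtime sees the full count (explicit args + extras + receiver), and
  // then jumps to the external reference. For example, a0 = 2 with
  // NEEDS_CALLED_FUNCTION ends up as 2 + 1 (extra) + 1 (receiver) = 4.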
37
38 // Insert extra arguments.
39 int num_extra_args = 0;
40 if (extra_args == NEEDS_CALLED_FUNCTION) {
41 num_extra_args = 1;
42 __ push(a1);
43 } else {
44 DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
45 }
46
47  // JumpToExternalReference expects a0 to contain the number of arguments
48  // including the receiver and the extra arguments.
49  __ Daddu(a0, a0, num_extra_args + 1);
50  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
51}
52
53
54// Load the built-in InternalArray function from the current context.
55static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
56 Register result) {
57 // Load the native context.
58
59 __ ld(result,
60 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
61 __ ld(result,
62 FieldMemOperand(result, GlobalObject::kNativeContextOffset));
63 // Load the InternalArray function from the native context.
64 __ ld(result,
65 MemOperand(result,
66 Context::SlotOffset(
67 Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
68}
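// This helper (and GenerateLoadArrayFunction below) resolves a builtin
// constructor in three hops: the current context's GLOBAL_OBJECT_INDEX slot
// gives the global object, its native-context field gives the native context,
// and the function slot in the native context gives the JSFunction itself.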
69
70
71// Load the built-in Array function from the current context.
72static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
73 // Load the native context.
74
75 __ ld(result,
76 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
77 __ ld(result,
78 FieldMemOperand(result, GlobalObject::kNativeContextOffset));
79 // Load the Array function from the native context.
80 __ ld(result,
81 MemOperand(result,
82 Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
83}
84
85
86void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
87 // ----------- S t a t e -------------
88 // -- a0 : number of arguments
89 // -- ra : return address
90 // -- sp[...]: constructor arguments
91 // -----------------------------------
92 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
93
94 // Get the InternalArray function.
95 GenerateLoadInternalArrayFunction(masm, a1);
96
97 if (FLAG_debug_code) {
98 // Initial map for the builtin InternalArray functions should be maps.
99 __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
100 __ SmiTst(a2, a4);
101 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
102 a4, Operand(zero_reg));
103 __ GetObjectType(a2, a3, a4);
104 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
105 a4, Operand(MAP_TYPE));
106 }
107
108 // Run the native code for the InternalArray function called as a normal
109 // function.
110 // Tail call a stub.
111 InternalArrayConstructorStub stub(masm->isolate());
112 __ TailCallStub(&stub);
113}
114
115
116void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
117 // ----------- S t a t e -------------
118 // -- a0 : number of arguments
119 // -- ra : return address
120 // -- sp[...]: constructor arguments
121 // -----------------------------------
122 Label generic_array_code;
123
124 // Get the Array function.
125 GenerateLoadArrayFunction(masm, a1);
126
127 if (FLAG_debug_code) {
128 // Initial map for the builtin Array functions should be maps.
129 __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
130 __ SmiTst(a2, a4);
131 __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
132 a4, Operand(zero_reg));
133 __ GetObjectType(a2, a3, a4);
134 __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
135 a4, Operand(MAP_TYPE));
136 }
137
138 // Run the native code for the Array function called as a normal function.
139 // Tail call a stub.
140 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
141 ArrayConstructorStub stub(masm->isolate());
142 __ TailCallStub(&stub);
143}
144
145
146void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
147 // ----------- S t a t e -------------
148 // -- a0 : number of arguments
149 // -- a1 : constructor function
150 // -- ra : return address
151 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
152 // -- sp[argc * 8] : receiver
153 // -----------------------------------
154 Counters* counters = masm->isolate()->counters();
155 __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);
156
157 Register function = a1;
158 if (FLAG_debug_code) {
159 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
160 __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
161 }
162
163  // Load the first argument in a0 and get rid of the rest.
164 Label no_arguments;
165 __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
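  // The first argument lives at sp[(argc - 1) * 8] (see the state comment
  // above), so the code below advances sp by that amount, loads the slot into
  // a0 and then drops it together with the receiver one slot above it. With
  // argc = 3, for instance, sp moves up 16 bytes before the load.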
166 // First args = sp[(argc - 1) * 8].
167 __ Dsubu(a0, a0, Operand(1));
168 __ dsll(a0, a0, kPointerSizeLog2);
169 __ Daddu(sp, a0, sp);
170 __ ld(a0, MemOperand(sp));
171  // sp now points to args[0]; drop args[0] and the receiver.
172 __ Drop(2);
173
174 Register argument = a2;
175 Label not_cached, argument_is_string;
176 __ LookupNumberStringCache(a0, // Input.
177 argument, // Result.
178 a3, // Scratch.
179 a4, // Scratch.
180 a5, // Scratch.
181 &not_cached);
182 __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, a4);
183 __ bind(&argument_is_string);
184
185 // ----------- S t a t e -------------
186 // -- a2 : argument converted to string
187 // -- a1 : constructor function
188 // -- ra : return address
189 // -----------------------------------
190
191 Label gc_required;
192 __ Allocate(JSValue::kSize,
193 v0, // Result.
194 a3, // Scratch.
195 a4, // Scratch.
196 &gc_required,
197 TAG_OBJECT);
198
199 // Initialising the String Object.
200 Register map = a3;
201 __ LoadGlobalFunctionInitialMap(function, map, a4);
202 if (FLAG_debug_code) {
203 __ lbu(a4, FieldMemOperand(map, Map::kInstanceSizeOffset));
204 __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
205 a4, Operand(JSValue::kSize >> kPointerSizeLog2));
206 __ lbu(a4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
207 __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
208 a4, Operand(zero_reg));
209 }
210 __ sd(map, FieldMemOperand(v0, HeapObject::kMapOffset));
211
212 __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
213 __ sd(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
214 __ sd(a3, FieldMemOperand(v0, JSObject::kElementsOffset));
215
216 __ sd(argument, FieldMemOperand(v0, JSValue::kValueOffset));
217
218 // Ensure the object is fully initialized.
219 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
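  // The four stores above (map, properties, elements, value) cover every
  // pointer-size field of a JSValue, which is exactly what the assert on
  // JSValue::kSize checks: no field is left uninitialized when the wrapper
  // becomes visible to the GC.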
220
221 __ Ret();
222
223 // The argument was not found in the number to string cache. Check
224 // if it's a string already before calling the conversion builtin.
225 Label convert_argument;
226 __ bind(&not_cached);
227 __ JumpIfSmi(a0, &convert_argument);
228
229 // Is it a String?
230 __ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
231 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
232 STATIC_ASSERT(kNotStringTag != 0);
233 __ And(a4, a3, Operand(kIsNotStringMask));
234 __ Branch(&convert_argument, ne, a4, Operand(zero_reg));
235 __ mov(argument, a0);
236 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, a4);
237 __ Branch(&argument_is_string);
238
239 // Invoke the conversion builtin and put the result into a2.
240 __ bind(&convert_argument);
241 __ push(function); // Preserve the function.
242 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, a4);
243 {
244 FrameScope scope(masm, StackFrame::INTERNAL);
245 __ push(a0);
246 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
247 }
248 __ pop(function);
249 __ mov(argument, v0);
250 __ Branch(&argument_is_string);
251
252 // Load the empty string into a2, remove the receiver from the
253 // stack, and jump back to the case where the argument is a string.
254 __ bind(&no_arguments);
255 __ LoadRoot(argument, Heap::kempty_stringRootIndex);
256 __ Drop(1);
257 __ Branch(&argument_is_string);
258
259 // At this point the argument is already a string. Call runtime to
260 // create a string wrapper.
261 __ bind(&gc_required);
262 __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, a4);
263 {
264 FrameScope scope(masm, StackFrame::INTERNAL);
265 __ push(argument);
266 __ CallRuntime(Runtime::kNewStringWrapper, 1);
267 }
268 __ Ret();
269}
270
271
272static void CallRuntimePassFunction(
273 MacroAssembler* masm, Runtime::FunctionId function_id) {
274 FrameScope scope(masm, StackFrame::INTERNAL);
275 // Push a copy of the function onto the stack.
276  // Push the function again as the parameter to the runtime call.
277 __ Push(a1, a1);
278
279 __ CallRuntime(function_id, 1);
280  // Restore the function.
281 __ Pop(a1);
282}
283
284
285static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
286 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
287 __ ld(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
288 __ Daddu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
289 __ Jump(at);
290}
291
292
293static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
294 __ Daddu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
295 __ Jump(at);
296}
297
298
299void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
300 // Checking whether the queued function is ready for install is optional,
301 // since we come across interrupts and stack checks elsewhere. However,
302 // not checking may delay installing ready functions, and always checking
303 // would be quite expensive. A good compromise is to first check against
304 // stack limit as a cue for an interrupt signal.
305 Label ok;
306 __ LoadRoot(a4, Heap::kStackLimitRootIndex);
307 __ Branch(&ok, hs, sp, Operand(a4));
308
309 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
310 GenerateTailCallToReturnedCode(masm);
311
312 __ bind(&ok);
313 GenerateTailCallToSharedCode(masm);
314}
315
316
317static void Generate_JSConstructStubHelper(MacroAssembler* masm,
318 bool is_api_function,
319 bool create_memento) {
320 // ----------- S t a t e -------------
321 // -- a0 : number of arguments
322 // -- a1 : constructor function
323 // -- a2 : allocation site or undefined
324 // -- ra : return address
325 // -- sp[...]: constructor arguments
326 // -----------------------------------
327
328 // Should never create mementos for api functions.
329 DCHECK(!is_api_function || !create_memento);
330
331 Isolate* isolate = masm->isolate();
332
333 // ----------- S t a t e -------------
334 // -- a0 : number of arguments
335 // -- a1 : constructor function
336 // -- ra : return address
337 // -- sp[...]: constructor arguments
338 // -----------------------------------
339
340 // Enter a construct frame.
341 {
342 FrameScope scope(masm, StackFrame::CONSTRUCT);
343
344 if (create_memento) {
345 __ AssertUndefinedOrAllocationSite(a2, a3);
346 __ push(a2);
347 }
348
349 // Preserve the two incoming parameters on the stack.
350 // Tag arguments count.
351 __ dsll32(a0, a0, 0);
352 __ MultiPushReversed(a0.bit() | a1.bit());
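    // On MIPS64 a smi keeps its payload in the upper 32 bits, so shifting the
    // untagged count left by 32 (dsll32 with shift amount 0) produces the
    // tagged value directly; an argument count of 2, for example, becomes
    // 0x0000000200000000.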
353
354 Label rt_call, allocated;
355 // Try to allocate the object without transitioning into C code. If any of
356 // the preconditions is not met, the code bails out to the runtime call.
357 if (FLAG_inline_new) {
358 Label undo_allocation;
359 ExternalReference debug_step_in_fp =
360 ExternalReference::debug_step_in_fp_address(isolate);
361 __ li(a2, Operand(debug_step_in_fp));
362 __ ld(a2, MemOperand(a2));
363 __ Branch(&rt_call, ne, a2, Operand(zero_reg));
364
365 // Load the initial map and verify that it is in fact a map.
366 // a1: constructor function
367 __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
368 __ JumpIfSmi(a2, &rt_call);
369 __ GetObjectType(a2, a3, t0);
370 __ Branch(&rt_call, ne, t0, Operand(MAP_TYPE));
371
372 // Check that the constructor is not constructing a JSFunction (see
373 // comments in Runtime_NewObject in runtime.cc). In which case the
374 // initial map's instance type would be JS_FUNCTION_TYPE.
375 // a1: constructor function
376 // a2: initial map
377 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
378 __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE));
379
380 if (!is_api_function) {
381 Label allocate;
382 MemOperand bit_field3 = FieldMemOperand(a2, Map::kBitField3Offset);
383 // Check if slack tracking is enabled.
384 __ lwu(a4, bit_field3);
385        __ DecodeField<Map::Counter>(a6, a4);
386 __ Branch(&allocate, lt, a6,
387 Operand(static_cast<int64_t>(Map::kSlackTrackingCounterEnd)));
388        // Decrease generous allocation count.
389        __ Dsubu(a4, a4, Operand(1 << Map::Counter::kShift));
390 __ Branch(USE_DELAY_SLOT, &allocate, ne, a6,
391 Operand(Map::kSlackTrackingCounterEnd));
392        __ sw(a4, bit_field3);  // In delay slot.
393
394 __ Push(a1, a2, a1); // a1 = Constructor.
395 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
396
397 __ Pop(a1, a2);
398        // Slack tracking counter is Map::kSlackTrackingCounterEnd after runtime
399 // call.
400 __ li(a6, Map::kSlackTrackingCounterEnd);
401
402 __ bind(&allocate);
403 }
404
405 // Now allocate the JSObject on the heap.
406 // a1: constructor function
407 // a2: initial map
408 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
409 if (create_memento) {
410 __ Daddu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
411 }
412
413 __ Allocate(a3, t0, t1, t2, &rt_call, SIZE_IN_WORDS);
414
415 // Allocated the JSObject, now initialize the fields. Map is set to
416 // initial map and properties and elements are set to empty fixed array.
417 // a1: constructor function
418 // a2: initial map
419 // a3: object size (not including memento if create_memento)
420 // t0: JSObject (not tagged)
421 __ LoadRoot(t2, Heap::kEmptyFixedArrayRootIndex);
422 __ mov(t1, t0);
423 __ sd(a2, MemOperand(t1, JSObject::kMapOffset));
424 __ sd(t2, MemOperand(t1, JSObject::kPropertiesOffset));
425 __ sd(t2, MemOperand(t1, JSObject::kElementsOffset));
426 __ Daddu(t1, t1, Operand(3*kPointerSize));
427 DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
428 DCHECK_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
429 DCHECK_EQ(2 * kPointerSize, JSObject::kElementsOffset);
430
431 // Fill all the in-object properties with appropriate filler.
432 // a1: constructor function
433 // a2: initial map
434 // a3: object size (in words, including memento if create_memento)
435 // t0: JSObject (not tagged)
436 // t1: First in-object property of JSObject (not tagged)
437 // a6: slack tracking counter (non-API function case)
438 DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
439
440 // Use t3 to hold undefined, which is used in several places below.
441 __ LoadRoot(t3, Heap::kUndefinedValueRootIndex);
442
443 if (!is_api_function) {
444 Label no_inobject_slack_tracking;
445
446 // Check if slack tracking is enabled.
447        __ Branch(&no_inobject_slack_tracking, lt, a6,
448 Operand(static_cast<int64_t>(Map::kSlackTrackingCounterEnd)));
449
450 // Allocate object with a slack.
451 __ lwu(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset));
452 __ Ext(a0, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
453 kBitsPerByte);
454 __ dsll(at, a0, kPointerSizeLog2);
455 __ daddu(a0, t1, at);
456 // a0: offset of first field after pre-allocated fields
457 if (FLAG_debug_code) {
458 __ dsll(at, a3, kPointerSizeLog2);
459 __ Daddu(t2, t0, Operand(at)); // End of object.
460 __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
461 a0, Operand(t2));
462 }
463 __ InitializeFieldsWithFiller(t1, a0, t3);
464 // To allow for truncation.
465 __ LoadRoot(t3, Heap::kOnePointerFillerMapRootIndex);
466 // Fill the remaining fields with one pointer filler map.
467
468 __ bind(&no_inobject_slack_tracking);
469 }
470
471 if (create_memento) {
472 __ Dsubu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
473 __ dsll(a0, a0, kPointerSizeLog2);
474 __ Daddu(a0, t0, Operand(a0)); // End of object.
475 __ InitializeFieldsWithFiller(t1, a0, t3);
476
477 // Fill in memento fields.
478 // t1: points to the allocated but uninitialized memento.
479 __ LoadRoot(t3, Heap::kAllocationMementoMapRootIndex);
480 DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
481 __ sd(t3, MemOperand(t1));
482 __ Daddu(t1, t1, kPointerSize);
483 // Load the AllocationSite.
484 __ ld(t3, MemOperand(sp, 2 * kPointerSize));
485 DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
486 __ sd(t3, MemOperand(t1));
487 __ Daddu(t1, t1, kPointerSize);
488 } else {
489 __ dsll(at, a3, kPointerSizeLog2);
490 __ Daddu(a0, t0, Operand(at)); // End of object.
491 __ InitializeFieldsWithFiller(t1, a0, t3);
492 }
493
494 // Add the object tag to make the JSObject real, so that we can continue
495 // and jump into the continuation code at any time from now on. Any
496 // failures need to undo the allocation, so that the heap is in a
497 // consistent state and verifiable.
498 __ Daddu(t0, t0, Operand(kHeapObjectTag));
499
500 // Check if a non-empty properties array is needed. Continue with
501      // allocated object if not; fall through to runtime call if it is.
502 // a1: constructor function
503 // t0: JSObject
504 // t1: start of next object (not tagged)
505 __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
506 // The field instance sizes contains both pre-allocated property fields
507 // and in-object properties.
508 __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset));
509 __ Ext(t2, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
510 kBitsPerByte);
511 __ Daddu(a3, a3, Operand(t2));
512 __ Ext(t2, a0, Map::kInObjectPropertiesByte * kBitsPerByte,
513 kBitsPerByte);
514 __ dsubu(a3, a3, t2);
515
516 // Done if no extra properties are to be allocated.
517 __ Branch(&allocated, eq, a3, Operand(zero_reg));
518 __ Assert(greater_equal, kPropertyAllocationCountFailed,
519 a3, Operand(zero_reg));
520
521 // Scale the number of elements by pointer size and add the header for
522 // FixedArrays to the start of the next object calculation from above.
523 // a1: constructor
524 // a3: number of elements in properties array
525 // t0: JSObject
526 // t1: start of next object
527 __ Daddu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize));
528 __ Allocate(
529 a0,
530 t1,
531 t2,
532 a2,
533 &undo_allocation,
534 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
535
536 // Initialize the FixedArray.
537 // a1: constructor
538 // a3: number of elements in properties array (untagged)
539 // t0: JSObject
540 // t1: start of next object
541 __ LoadRoot(t2, Heap::kFixedArrayMapRootIndex);
542 __ mov(a2, t1);
543 __ sd(t2, MemOperand(a2, JSObject::kMapOffset));
544 // Tag number of elements.
545 __ dsll32(a0, a3, 0);
546 __ sd(a0, MemOperand(a2, FixedArray::kLengthOffset));
547 __ Daddu(a2, a2, Operand(2 * kPointerSize));
548
549 DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
550 DCHECK_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
551
552 // Initialize the fields to undefined.
553 // a1: constructor
554 // a2: First element of FixedArray (not tagged)
555 // a3: number of elements in properties array
556 // t0: JSObject
557 // t1: FixedArray (not tagged)
558 __ dsll(a7, a3, kPointerSizeLog2);
559 __ daddu(t2, a2, a7); // End of object.
560 DCHECK_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
561 { Label loop, entry;
562 if (!is_api_function || create_memento) {
563 __ LoadRoot(t3, Heap::kUndefinedValueRootIndex);
564 } else if (FLAG_debug_code) {
565 __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
566 __ Assert(eq, kUndefinedValueNotLoaded, t3, Operand(a6));
567 }
568 __ jmp(&entry);
569 __ bind(&loop);
570 __ sd(t3, MemOperand(a2));
571 __ daddiu(a2, a2, kPointerSize);
572 __ bind(&entry);
573 __ Branch(&loop, less, a2, Operand(t2));
574 }
575
576 // Store the initialized FixedArray into the properties field of
577 // the JSObject.
578 // a1: constructor function
579 // t0: JSObject
580 // t1: FixedArray (not tagged)
581 __ Daddu(t1, t1, Operand(kHeapObjectTag)); // Add the heap tag.
582 __ sd(t1, FieldMemOperand(t0, JSObject::kPropertiesOffset));
583
584 // Continue with JSObject being successfully allocated.
585 // a1: constructor function
586 // a4: JSObject
587 __ jmp(&allocated);
588
589 // Undo the setting of the new top so that the heap is verifiable. For
590 // example, the map's unused properties potentially do not match the
591 // allocated objects unused properties.
592 // t0: JSObject (previous new top)
593 __ bind(&undo_allocation);
594 __ UndoAllocationInNewSpace(t0, t1);
595 }
596
597 // Allocate the new receiver object using the runtime call.
598 // a1: constructor function
599 __ bind(&rt_call);
600 if (create_memento) {
601 // Get the cell or allocation site.
602 __ ld(a2, MemOperand(sp, 2 * kPointerSize));
603 __ push(a2);
604 }
605
606 __ push(a1); // Argument for Runtime_NewObject.
607 if (create_memento) {
608 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
609 } else {
610 __ CallRuntime(Runtime::kNewObject, 1);
611 }
612 __ mov(t0, v0);
613
614 // If we ended up using the runtime, and we want a memento, then the
615    // runtime call made it for us, and we shouldn't increment the create
616    // count.
617 Label count_incremented;
618 if (create_memento) {
619 __ jmp(&count_incremented);
620 }
621
622 // Receiver for constructor call allocated.
623 // t0: JSObject
624 __ bind(&allocated);
625
626 if (create_memento) {
627 __ ld(a2, MemOperand(sp, kPointerSize * 2));
628 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
629 __ Branch(&count_incremented, eq, a2, Operand(t1));
630 // a2 is an AllocationSite. We are creating a memento from it, so we
631 // need to increment the memento create count.
632 __ ld(a3, FieldMemOperand(a2,
633 AllocationSite::kPretenureCreateCountOffset));
634 __ Daddu(a3, a3, Operand(Smi::FromInt(1)));
635 __ sd(a3, FieldMemOperand(a2,
636 AllocationSite::kPretenureCreateCountOffset));
637 __ bind(&count_incremented);
638 }
639
640 __ Push(t0, t0);
641
642 // Reload the number of arguments from the stack.
643 // sp[0]: receiver
644 // sp[1]: receiver
645 // sp[2]: constructor function
646 // sp[3]: number of arguments (smi-tagged)
647 __ ld(a1, MemOperand(sp, 2 * kPointerSize));
648 __ ld(a3, MemOperand(sp, 3 * kPointerSize));
649
650 // Set up pointer to last argument.
651 __ Daddu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
652
653 // Set up number of arguments for function call below.
654 __ SmiUntag(a0, a3);
655
656 // Copy arguments and receiver to the expression stack.
657 // a0: number of arguments
658 // a1: constructor function
659 // a2: address of last argument (caller sp)
660 // a3: number of arguments (smi-tagged)
661 // sp[0]: receiver
662 // sp[1]: receiver
663 // sp[2]: constructor function
664 // sp[3]: number of arguments (smi-tagged)
665 Label loop, entry;
666 __ SmiUntag(a3);
667 __ jmp(&entry);
668 __ bind(&loop);
669 __ dsll(a4, a3, kPointerSizeLog2);
670 __ Daddu(a4, a2, Operand(a4));
671 __ ld(a5, MemOperand(a4));
672 __ push(a5);
673 __ bind(&entry);
674 __ Daddu(a3, a3, Operand(-1));
675 __ Branch(&loop, greater_equal, a3, Operand(zero_reg));
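    // The loop above walks a3 from argc - 1 down to 0 and pushes the value
    // found at a2 + index * kPointerSize, copying every constructor argument
    // from the caller's frame onto the construct frame's expression stack.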
676
677 // Call the function.
678 // a0: number of arguments
679 // a1: constructor function
680 if (is_api_function) {
681 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
682 Handle<Code> code =
683 masm->isolate()->builtins()->HandleApiCallConstruct();
684 __ Call(code, RelocInfo::CODE_TARGET);
685 } else {
686 ParameterCount actual(a0);
687 __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
688 }
689
690 // Store offset of return address for deoptimizer.
691 if (!is_api_function) {
692 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
693 }
694
695 // Restore context from the frame.
696 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
697
698 // If the result is an object (in the ECMA sense), we should get rid
699 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
700 // on page 74.
701 Label use_receiver, exit;
702
703 // If the result is a smi, it is *not* an object in the ECMA sense.
704 // v0: result
705 // sp[0]: receiver (newly allocated object)
706 // sp[1]: constructor function
707 // sp[2]: number of arguments (smi-tagged)
708 __ JumpIfSmi(v0, &use_receiver);
709
710 // If the type of the result (stored in its map) is less than
711 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
712 __ GetObjectType(v0, a1, a3);
713 __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
714
715 // Throw away the result of the constructor invocation and use the
716 // on-stack receiver as the result.
717 __ bind(&use_receiver);
718 __ ld(v0, MemOperand(sp));
719
720 // Remove receiver from the stack, remove caller arguments, and
721 // return.
722 __ bind(&exit);
723 // v0: result
724 // sp[0]: receiver (newly allocated object)
725 // sp[1]: constructor function
726 // sp[2]: number of arguments (smi-tagged)
727 __ ld(a1, MemOperand(sp, 2 * kPointerSize));
728
729 // Leave construct frame.
730 }
731
732 __ SmiScale(a4, a1, kPointerSizeLog2);
733 __ Daddu(sp, sp, a4);
734 __ Daddu(sp, sp, kPointerSize);
735 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
736 __ Ret();
737}
738
739
740void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
741 Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
742}
743
744
745void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
746 Generate_JSConstructStubHelper(masm, true, false);
747}
748
749
750static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
751 bool is_construct) {
752 // Called from JSEntryStub::GenerateBody
753
754 // ----------- S t a t e -------------
755 // -- a0: code entry
756 // -- a1: function
757 // -- a2: receiver_pointer
758 // -- a3: argc
759 // -- s0: argv
760 // -----------------------------------
761 ProfileEntryHookStub::MaybeCallEntryHook(masm);
762 // Clear the context before we push it when entering the JS frame.
763 __ mov(cp, zero_reg);
764
765 // Enter an internal frame.
766 {
767 FrameScope scope(masm, StackFrame::INTERNAL);
768
769 // Set up the context from the function argument.
770 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
771
772 // Push the function and the receiver onto the stack.
773 __ Push(a1, a2);
774
775 // Copy arguments to the stack in a loop.
776 // a3: argc
777 // s0: argv, i.e. points to first arg
778 Label loop, entry;
779    __ dsll(a4, a3, kPointerSizeLog2);
780 __ daddu(a6, s0, a4);
781 __ b(&entry);
782 __ nop(); // Branch delay slot nop.
783 // a6 points past last arg.
784 __ bind(&loop);
785 __ ld(a4, MemOperand(s0)); // Read next parameter.
786 __ daddiu(s0, s0, kPointerSize);
787 __ ld(a4, MemOperand(a4)); // Dereference handle.
788 __ push(a4); // Push parameter.
789 __ bind(&entry);
790 __ Branch(&loop, ne, s0, Operand(a6));
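    // Each argv slot holds a handle (an Object**), which is why every
    // parameter needs two loads: the first ld reads the handle location out of
    // argv, the second dereferences it to get the actual object to push. a6
    // was precomputed as argv + argc * kPointerSize and marks the end address.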
791
792 // Initialize all JavaScript callee-saved registers, since they will be seen
793 // by the garbage collector as part of handlers.
794 __ LoadRoot(a4, Heap::kUndefinedValueRootIndex);
795 __ mov(s1, a4);
796 __ mov(s2, a4);
797 __ mov(s3, a4);
798 __ mov(s4, a4);
799 __ mov(s5, a4);
800 // s6 holds the root address. Do not clobber.
801 // s7 is cp. Do not init.
802
803 // Invoke the code and pass argc as a0.
804 __ mov(a0, a3);
805 if (is_construct) {
806 // No type feedback cell is available
807 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
808 CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
809 __ CallStub(&stub);
810 } else {
811 ParameterCount actual(a0);
812 __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
813 }
814
815 // Leave internal frame.
816 }
817 __ Jump(ra);
818}
819
820
821void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
822 Generate_JSEntryTrampolineHelper(masm, false);
823}
824
825
826void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
827 Generate_JSEntryTrampolineHelper(masm, true);
828}
829
830
831void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
832 CallRuntimePassFunction(masm, Runtime::kCompileLazy);
833 GenerateTailCallToReturnedCode(masm);
834}
835
836
837static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
838 FrameScope scope(masm, StackFrame::INTERNAL);
839 // Push a copy of the function onto the stack.
840 // Push function as parameter to the runtime call.
841 __ Push(a1, a1);
842 // Whether to compile in a background thread.
843 __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
844
845 __ CallRuntime(Runtime::kCompileOptimized, 2);
846 // Restore receiver.
847 __ Pop(a1);
848}
849
850
851void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
852 CallCompileOptimized(masm, false);
853 GenerateTailCallToReturnedCode(masm);
854}
855
856
857void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
858 CallCompileOptimized(masm, true);
859 GenerateTailCallToReturnedCode(masm);
860}
861
862
863static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
864 // For now, we are relying on the fact that make_code_young doesn't do any
865 // garbage collection which allows us to save/restore the registers without
866 // worrying about which of them contain pointers. We also don't build an
867 // internal frame to make the code faster, since we shouldn't have to do stack
868 // crawls in MakeCodeYoung. This seems a bit fragile.
869
870 // Set a0 to point to the head of the PlatformCodeAge sequence.
871 __ Dsubu(a0, a0,
872 Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
873
874 // The following registers must be saved and restored when calling through to
875 // the runtime:
876 // a0 - contains return address (beginning of patch sequence)
877 // a1 - isolate
878 RegList saved_regs =
879 (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
880 FrameScope scope(masm, StackFrame::MANUAL);
881 __ MultiPush(saved_regs);
882 __ PrepareCallCFunction(2, 0, a2);
883 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
884 __ CallCFunction(
885 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
886 __ MultiPop(saved_regs);
887 __ Jump(a0);
888}
889
890#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
891void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
892 MacroAssembler* masm) { \
893 GenerateMakeCodeYoungAgainCommon(masm); \
894} \
895void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
896 MacroAssembler* masm) { \
897 GenerateMakeCodeYoungAgainCommon(masm); \
898}
899CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
900#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
901
902
903void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
904 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
905 // that make_code_young doesn't do any garbage collection which allows us to
906 // save/restore the registers without worrying about which of them contain
907 // pointers.
908
909 // Set a0 to point to the head of the PlatformCodeAge sequence.
910 __ Dsubu(a0, a0,
911 Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
912
913 // The following registers must be saved and restored when calling through to
914 // the runtime:
915 // a0 - contains return address (beginning of patch sequence)
916 // a1 - isolate
917 RegList saved_regs =
918 (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
919 FrameScope scope(masm, StackFrame::MANUAL);
920 __ MultiPush(saved_regs);
921 __ PrepareCallCFunction(2, 0, a2);
922 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
923 __ CallCFunction(
924 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
925 2);
926 __ MultiPop(saved_regs);
927
928 // Perform prologue operations usually performed by the young code stub.
929 __ Push(ra, fp, cp, a1);
930 __ Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
931
932 // Jump to point after the code-age stub.
933 __ Daddu(a0, a0, Operand((kNoCodeAgeSequenceLength)));
934 __ Jump(a0);
935}
936
937
938void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
939 GenerateMakeCodeYoungAgainCommon(masm);
940}
941
942
943static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
944 SaveFPRegsMode save_doubles) {
945 {
946 FrameScope scope(masm, StackFrame::INTERNAL);
947
948 // Preserve registers across notification, this is important for compiled
949 // stubs that tail call the runtime on deopts passing their parameters in
950 // registers.
951 __ MultiPush(kJSCallerSaved | kCalleeSaved);
952 // Pass the function and deoptimization type to the runtime system.
953 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
954 __ MultiPop(kJSCallerSaved | kCalleeSaved);
955 }
956
957 __ Daddu(sp, sp, Operand(kPointerSize)); // Ignore state
958 __ Jump(ra); // Jump to miss handler
959}
960
961
962void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
963 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
964}
965
966
967void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
968 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
969}
970
971
972static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
973 Deoptimizer::BailoutType type) {
974 {
975 FrameScope scope(masm, StackFrame::INTERNAL);
976 // Pass the function and deoptimization type to the runtime system.
977 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
978 __ push(a0);
979 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
980 }
981
982 // Get the full codegen state from the stack and untag it -> a6.
983 __ ld(a6, MemOperand(sp, 0 * kPointerSize));
984 __ SmiUntag(a6);
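  // Two full-codegen states are handled below: NO_REGISTERS simply drops the
  // state word from the stack, while TOS_REG also reloads the top-of-stack
  // value into v0 from sp[1] before dropping both words. Any other state is a
  // bug.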
985 // Switch on the state.
986 Label with_tos_register, unknown_state;
987 __ Branch(&with_tos_register,
988 ne, a6, Operand(FullCodeGenerator::NO_REGISTERS));
989 __ Ret(USE_DELAY_SLOT);
990 // Safe to fill delay slot Addu will emit one instruction.
991 __ Daddu(sp, sp, Operand(1 * kPointerSize)); // Remove state.
992
993 __ bind(&with_tos_register);
994 __ ld(v0, MemOperand(sp, 1 * kPointerSize));
995 __ Branch(&unknown_state, ne, a6, Operand(FullCodeGenerator::TOS_REG));
996
997 __ Ret(USE_DELAY_SLOT);
998 // Safe to fill delay slot Addu will emit one instruction.
999 __ Daddu(sp, sp, Operand(2 * kPointerSize)); // Remove state.
1000
1001 __ bind(&unknown_state);
1002 __ stop("no cases left");
1003}
1004
1005
1006void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1007 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1008}
1009
1010
1011void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1012 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1013}
1014
1015
1016void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1017 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1018}
1019
1020
1021void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1022 // Lookup the function in the JavaScript frame.
1023 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1024 {
1025 FrameScope scope(masm, StackFrame::INTERNAL);
1026 // Pass function as argument.
1027 __ push(a0);
1028 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1029 }
1030
1031 // If the code object is null, just return to the unoptimized code.
1032 __ Ret(eq, v0, Operand(Smi::FromInt(0)));
1033
1034 // Load deoptimization data from the code object.
1035 // <deopt_data> = <code>[#deoptimization_data_offset]
1036  __ ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1037
1038 // Load the OSR entrypoint offset from the deoptimization data.
1039 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1040 __ ld(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
1041 DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
1042 __ SmiUntag(a1);
1043
1044 // Compute the target address = code_obj + header_size + osr_offset
1045 // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1046 __ daddu(v0, v0, a1);
1047 __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);
1048
1049 // And "return" to the OSR entry point of the function.
1050 __ Ret();
1051}
1052
1053
1054void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1055  // We check the stack limit as an indicator that recompilation might be done.
1056 Label ok;
1057 __ LoadRoot(at, Heap::kStackLimitRootIndex);
1058 __ Branch(&ok, hs, sp, Operand(at));
1059 {
1060 FrameScope scope(masm, StackFrame::INTERNAL);
1061 __ CallRuntime(Runtime::kStackGuard, 0);
1062 }
1063 __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
1064 RelocInfo::CODE_TARGET);
1065
1066 __ bind(&ok);
1067 __ Ret();
1068}
1069
1070
1071void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1072 // 1. Make sure we have at least one argument.
1073 // a0: actual number of arguments
1074 { Label done;
1075 __ Branch(&done, ne, a0, Operand(zero_reg));
1076 __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
1077 __ push(a6);
1078 __ Daddu(a0, a0, Operand(1));
1079 __ bind(&done);
1080 }
1081
1082 // 2. Get the function to call (passed as receiver) from the stack, check
1083 // if it is a function.
1084 // a0: actual number of arguments
1085 Label slow, non_function;
1086 __ dsll(at, a0, kPointerSizeLog2);
1087 __ daddu(at, sp, at);
1088 __ ld(a1, MemOperand(at));
1089 __ JumpIfSmi(a1, &non_function);
1090 __ GetObjectType(a1, a2, a2);
1091 __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));
1092
1093 // 3a. Patch the first argument if necessary when calling a function.
1094 // a0: actual number of arguments
1095 // a1: function
1096 Label shift_arguments;
1097 __ li(a4, Operand(0, RelocInfo::NONE32)); // Indicate regular JS_FUNCTION.
1098 { Label convert_to_object, use_global_proxy, patch_receiver;
1099 // Change context eagerly in case we need the global receiver.
1100 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
1101
1102 // Do not transform the receiver for strict mode functions.
1103 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1104 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kStrictModeByteOffset));
1105 __ And(a7, a3, Operand(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
1106 __ Branch(&shift_arguments, ne, a7, Operand(zero_reg));
1107
1108 // Do not transform the receiver for native (Compilerhints already in a3).
1109 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
1110 __ And(a7, a3, Operand(1 << SharedFunctionInfo::kNativeBitWithinByte));
1111 __ Branch(&shift_arguments, ne, a7, Operand(zero_reg));
1112
1113 // Compute the receiver in sloppy mode.
1114    // Load the first argument into a2: a2 = *(sp + n_args * 8 - kPointerSize).
1115 __ dsll(at, a0, kPointerSizeLog2);
1116 __ daddu(a2, sp, at);
1117 __ ld(a2, MemOperand(a2, -kPointerSize));
1118 // a0: actual number of arguments
1119 // a1: function
1120 // a2: first argument
1121 __ JumpIfSmi(a2, &convert_to_object, a6);
1122
1123 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
1124 __ Branch(&use_global_proxy, eq, a2, Operand(a3));
1125 __ LoadRoot(a3, Heap::kNullValueRootIndex);
1126 __ Branch(&use_global_proxy, eq, a2, Operand(a3));
1127
1128 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1129 __ GetObjectType(a2, a3, a3);
1130 __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
1131
1132 __ bind(&convert_to_object);
1133 // Enter an internal frame in order to preserve argument count.
1134 {
1135 FrameScope scope(masm, StackFrame::INTERNAL);
1136 __ SmiTag(a0);
1137 __ Push(a0, a2);
1138 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1139 __ mov(a2, v0);
1140
1141 __ pop(a0);
1142 __ SmiUntag(a0);
1143 // Leave internal frame.
1144 }
1145 // Restore the function to a1, and the flag to a4.
1146 __ dsll(at, a0, kPointerSizeLog2);
1147 __ daddu(at, sp, at);
1148 __ ld(a1, MemOperand(at));
1149 __ Branch(USE_DELAY_SLOT, &patch_receiver);
1150 __ li(a4, Operand(0, RelocInfo::NONE32));
1151
1152 __ bind(&use_global_proxy);
1153 __ ld(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1154 __ ld(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));
1155
1156 __ bind(&patch_receiver);
1157 __ dsll(at, a0, kPointerSizeLog2);
1158 __ daddu(a3, sp, at);
1159 __ sd(a2, MemOperand(a3, -kPointerSize));
1160
1161 __ Branch(&shift_arguments);
1162 }
1163
1164 // 3b. Check for function proxy.
1165 __ bind(&slow);
1166 __ li(a4, Operand(1, RelocInfo::NONE32)); // Indicate function proxy.
1167 __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));
1168
1169 __ bind(&non_function);
1170 __ li(a4, Operand(2, RelocInfo::NONE32)); // Indicate non-function.
1171
1172 // 3c. Patch the first argument when calling a non-function. The
1173 // CALL_NON_FUNCTION builtin expects the non-function callee as
1174 // receiver, so overwrite the first argument which will ultimately
1175 // become the receiver.
1176 // a0: actual number of arguments
1177 // a1: function
1178 // a4: call type (0: JS function, 1: function proxy, 2: non-function)
1179 __ dsll(at, a0, kPointerSizeLog2);
1180 __ daddu(a2, sp, at);
1181 __ sd(a1, MemOperand(a2, -kPointerSize));
1182
1183 // 4. Shift arguments and return address one slot down on the stack
1184 // (overwriting the original receiver). Adjust argument count to make
1185 // the original first argument the new receiver.
1186 // a0: actual number of arguments
1187 // a1: function
1188 // a4: call type (0: JS function, 1: function proxy, 2: non-function)
1189 __ bind(&shift_arguments);
1190 { Label loop;
1191 // Calculate the copy start address (destination). Copy end address is sp.
1192 __ dsll(at, a0, kPointerSizeLog2);
1193 __ daddu(a2, sp, at);
1194
1195 __ bind(&loop);
1196 __ ld(at, MemOperand(a2, -kPointerSize));
1197 __ sd(at, MemOperand(a2));
1198 __ Dsubu(a2, a2, Operand(kPointerSize));
1199 __ Branch(&loop, ne, a2, Operand(sp));
1200 // Adjust the actual number of arguments and remove the top element
1201 // (which is a copy of the last argument).
1202 __ Dsubu(a0, a0, Operand(1));
1203 __ Pop();
1204 }
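  // Concretely, with a0 = 2 the stack goes from
  //   sp[0] = arg2, sp[8] = arg1, sp[16] = receiver
  // to sp[0] = arg2, sp[8] = arg1 (the new receiver): every slot is copied one
  // position up, the leftover duplicate at sp[0] is popped and a0 becomes 1.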
1205
1206 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
1207 // or a function proxy via CALL_FUNCTION_PROXY.
1208 // a0: actual number of arguments
1209 // a1: function
1210 // a4: call type (0: JS function, 1: function proxy, 2: non-function)
1211 { Label function, non_proxy;
1212 __ Branch(&function, eq, a4, Operand(zero_reg));
1213 // Expected number of arguments is 0 for CALL_NON_FUNCTION.
1214 __ mov(a2, zero_reg);
1215 __ Branch(&non_proxy, ne, a4, Operand(1));
1216
1217 __ push(a1); // Re-add proxy object as additional argument.
1218 __ Daddu(a0, a0, Operand(1));
1219 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
1220 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1221 RelocInfo::CODE_TARGET);
1222
1223 __ bind(&non_proxy);
1224 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
1225 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1226 RelocInfo::CODE_TARGET);
1227 __ bind(&function);
1228 }
1229
1230 // 5b. Get the code to call from the function and check that the number of
1231 // expected arguments matches what we're providing. If so, jump
1232  // (tail-call) to the code in register a3 without checking arguments.
1233 // a0: actual number of arguments
1234 // a1: function
1235 __ ld(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1236 // The argument count is stored as int32_t on 64-bit platforms.
1237 // TODO(plind): Smi on 32-bit platforms.
1238 __ lw(a2,
1239 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
1240 // Check formal and actual parameter counts.
1241 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1242 RelocInfo::CODE_TARGET, ne, a2, Operand(a0));
1243
1244 __ ld(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
1245 ParameterCount expected(0);
1246 __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
1247}
1248
1249
1250void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1251 const int kIndexOffset =
1252 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
1253 const int kLimitOffset =
1254 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
1255 const int kArgsOffset = 2 * kPointerSize;
1256 const int kRecvOffset = 3 * kPointerSize;
1257 const int kFunctionOffset = 4 * kPointerSize;
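  // These offsets are fp-relative once the INTERNAL frame below is entered:
  // the function, receiver and arguments object passed to apply sit above the
  // frame at fp + 4, 3 and 2 pointer-size words respectively, while
  // kLimitOffset and kIndexOffset name the expression-stack slots that hold
  // the smi argument limit and the running copy index pushed further down.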
1258
1259 {
1260 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1261 __ ld(a0, MemOperand(fp, kFunctionOffset)); // Get the function.
1262 __ push(a0);
1263 __ ld(a0, MemOperand(fp, kArgsOffset)); // Get the args array.
1264 __ push(a0);
1265 // Returns (in v0) number of arguments to copy to stack as Smi.
1266 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
1267
1268 // Check the stack for overflow. We are not trying to catch
1269 // interruptions (e.g. debug break and preemption) here, so the "real stack
1270 // limit" is checked.
1271 Label okay;
1272 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
1273 // Make a2 the space we have left. The stack might already be overflowed
1274 // here which will cause a2 to become negative.
1275 __ dsubu(a2, sp, a2);
1276 // Check if the arguments will overflow the stack.
1277 __ SmiScale(a7, v0, kPointerSizeLog2);
1278 __ Branch(&okay, gt, a2, Operand(a7)); // Signed comparison.
1279
1280 // Out of stack space.
1281 __ ld(a1, MemOperand(fp, kFunctionOffset));
1282 __ Push(a1, v0);
1283 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
1284 // End of stack check.
1285
1286 // Push current limit and index.
1287 __ bind(&okay);
1288 __ mov(a1, zero_reg);
1289 __ Push(v0, a1); // Limit and initial index.
1290
1291 // Get the receiver.
1292 __ ld(a0, MemOperand(fp, kRecvOffset));
1293
1294 // Check that the function is a JS function (otherwise it must be a proxy).
1295 Label push_receiver;
1296 __ ld(a1, MemOperand(fp, kFunctionOffset));
1297 __ GetObjectType(a1, a2, a2);
1298 __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));
1299
1300 // Change context eagerly to get the right global object if necessary.
1301 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
1302 // Load the shared function info while the function is still in a1.
1303 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1304
1305 // Compute the receiver.
1306 // Do not transform the receiver for strict mode functions.
1307 Label call_to_object, use_global_proxy;
1308 __ lbu(a7, FieldMemOperand(a2, SharedFunctionInfo::kStrictModeByteOffset));
1309 __ And(a7, a7, Operand(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
1310 __ Branch(&push_receiver, ne, a7, Operand(zero_reg));
1311
1312 // Do not transform the receiver for native (Compilerhints already in a2).
1313 __ lbu(a7, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
1314 __ And(a7, a7, Operand(1 << SharedFunctionInfo::kNativeBitWithinByte));
1315 __ Branch(&push_receiver, ne, a7, Operand(zero_reg));
1316
1317 // Compute the receiver in sloppy mode.
1318 __ JumpIfSmi(a0, &call_to_object);
1319 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1320 __ Branch(&use_global_proxy, eq, a0, Operand(a1));
1321 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
1322 __ Branch(&use_global_proxy, eq, a0, Operand(a2));
1323
1324 // Check if the receiver is already a JavaScript object.
1325 // a0: receiver
1326 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1327 __ GetObjectType(a0, a1, a1);
1328 __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1329
1330 // Convert the receiver to a regular object.
1331 // a0: receiver
1332 __ bind(&call_to_object);
1333 __ push(a0);
1334 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1335 __ mov(a0, v0); // Put object in a0 to match other paths to push_receiver.
1336 __ Branch(&push_receiver);
1337
1338 __ bind(&use_global_proxy);
1339 __ ld(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1340 __ ld(a0, FieldMemOperand(a0, GlobalObject::kGlobalProxyOffset));
1341
1342 // Push the receiver.
1343 // a0: receiver
1344 __ bind(&push_receiver);
1345 __ push(a0);
1346
1347 // Copy all arguments from the array to the stack.
1348 Label entry, loop;
1349 __ ld(a0, MemOperand(fp, kIndexOffset));
1350 __ Branch(&entry);
1351
1352 // Load the current argument from the arguments array and push it to the
1353 // stack.
1354 // a0: current argument index
1355 __ bind(&loop);
1356 __ ld(a1, MemOperand(fp, kArgsOffset));
1357 __ Push(a1, a0);
1358
1359 // Call the runtime to access the property in the arguments array.
1360 __ CallRuntime(Runtime::kGetProperty, 2);
1361 __ push(v0);
1362
1363 // Use inline caching to access the arguments.
1364 __ ld(a0, MemOperand(fp, kIndexOffset));
1365 __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
1366 __ sd(a0, MemOperand(fp, kIndexOffset));
1367
1368 // Test if the copy loop has finished copying all the elements from the
1369 // arguments object.
1370 __ bind(&entry);
1371 __ ld(a1, MemOperand(fp, kLimitOffset));
1372 __ Branch(&loop, ne, a0, Operand(a1));
1373
1374 // Call the function.
1375 Label call_proxy;
1376 ParameterCount actual(a0);
1377 __ SmiUntag(a0);
1378 __ ld(a1, MemOperand(fp, kFunctionOffset));
1379 __ GetObjectType(a1, a2, a2);
1380 __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));
1381
1382 __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
1383
1384 frame_scope.GenerateLeaveFrame();
1385 __ Ret(USE_DELAY_SLOT);
1386 __ Daddu(sp, sp, Operand(3 * kPointerSize)); // In delay slot.
1387
1388 // Call the function proxy.
1389 __ bind(&call_proxy);
1390 __ push(a1); // Add function proxy as last argument.
1391 __ Daddu(a0, a0, Operand(1));
1392 __ li(a2, Operand(0, RelocInfo::NONE32));
1393 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
1394 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1395 RelocInfo::CODE_TARGET);
1396 // Tear down the internal frame and remove function, receiver and args.
1397 }
1398
1399 __ Ret(USE_DELAY_SLOT);
1400 __ Daddu(sp, sp, Operand(3 * kPointerSize)); // In delay slot.
1401}
1402
1403
1404static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
1405 Label* stack_overflow) {
1406 // ----------- S t a t e -------------
1407 // -- a0 : actual number of arguments
1408 // -- a1 : function (passed through to callee)
1409 // -- a2 : expected number of arguments
1410 // -----------------------------------
1411 // Check the stack for overflow. We are not trying to catch
1412 // interruptions (e.g. debug break and preemption) here, so the "real stack
1413 // limit" is checked.
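  // The check computes the remaining headroom as sp - real_stack_limit and
  // compares it against expected_args * kPointerSize; with a2 = 4, for
  // example, the adaptor needs 32 bytes, and that much headroom or less
  // branches to stack_overflow.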
1414 __ LoadRoot(a5, Heap::kRealStackLimitRootIndex);
1415 // Make a5 the space we have left. The stack might already be overflowed
1416 // here which will cause a5 to become negative.
1417 __ dsubu(a5, sp, a5);
1418 // Check if the arguments will overflow the stack.
1419 __ dsll(at, a2, kPointerSizeLog2);
1420 // Signed comparison.
1421 __ Branch(stack_overflow, le, a5, Operand(at));
1422}
1423
1424
1425static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1426 // __ sll(a0, a0, kSmiTagSize);
1427 __ dsll32(a0, a0, 0);
1428 __ li(a4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1429 __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit());
1430 __ Daddu(fp, sp,
1431 Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
1432}
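// Assuming MultiPush stores lower-numbered registers at lower addresses, the
// pushes above lay the frame out (from sp upwards) as: argument count (smi),
// function, ARGUMENTS_ADAPTOR marker, caller fp, ra. The final Daddu positions
// fp so that LeaveArgumentsAdaptorFrame below can reload the argument count
// from fp - (StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize).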
1433
1434
1435static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1436 // ----------- S t a t e -------------
1437 // -- v0 : result being passed through
1438 // -----------------------------------
1439 // Get the number of arguments passed (as a smi), tear down the frame and
1440 // then tear down the parameters.
1441 __ ld(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
1442 kPointerSize)));
1443 __ mov(sp, fp);
1444 __ MultiPop(fp.bit() | ra.bit());
1445 __ SmiScale(a4, a1, kPointerSizeLog2);
1446 __ Daddu(sp, sp, a4);
1447 // Adjust for the receiver.
1448 __ Daddu(sp, sp, Operand(kPointerSize));
1449}
1450
1451
1452void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1453 // State setup as expected by MacroAssembler::InvokePrologue.
1454 // ----------- S t a t e -------------
1455 // -- a0: actual arguments count
1456 // -- a1: function (passed through to callee)
1457 // -- a2: expected arguments count
1458 // -----------------------------------
1459
1460 Label stack_overflow;
1461 ArgumentAdaptorStackCheck(masm, &stack_overflow);
1462 Label invoke, dont_adapt_arguments;
1463
1464 Label enough, too_few;
1465 __ ld(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
1466 __ Branch(&dont_adapt_arguments, eq,
1467 a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1468  // We use Uless as the number of arguments should always be greater than 0.
1469 __ Branch(&too_few, Uless, a0, Operand(a2));
1470
1471 { // Enough parameters: actual >= expected.
1472 // a0: actual number of arguments as a smi
1473 // a1: function
1474 // a2: expected number of arguments
1475 // a3: code entry to call
1476 __ bind(&enough);
1477 EnterArgumentsAdaptorFrame(masm);
1478
1479 // Calculate copy start address into a0 and copy end address into a2.
1480 __ SmiScale(a0, a0, kPointerSizeLog2);
1481 __ Daddu(a0, fp, a0);
1482 // Adjust for return address and receiver.
1483 __ Daddu(a0, a0, Operand(2 * kPointerSize));
1484 // Compute copy end address.
1485 __ dsll(a2, a2, kPointerSizeLog2);
1486 __ dsubu(a2, a0, a2);
1487
1488 // Copy the arguments (including the receiver) to the new stack frame.
1489 // a0: copy start address
1490 // a1: function
1491 // a2: copy end address
1492 // a3: code entry to call
1493
1494 Label copy;
1495 __ bind(&copy);
1496 __ ld(a4, MemOperand(a0));
1497 __ push(a4);
1498 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
1499 __ daddiu(a0, a0, -kPointerSize); // In delay slot.
1500
1501 __ jmp(&invoke);
1502 }
1503
1504 { // Too few parameters: Actual < expected.
1505 __ bind(&too_few);
1506 EnterArgumentsAdaptorFrame(masm);
1507
1508 // Calculate copy start address into a0 and copy end address is fp.
1509 // a0: actual number of arguments as a smi
1510 // a1: function
1511 // a2: expected number of arguments
1512 // a3: code entry to call
1513 __ SmiScale(a0, a0, kPointerSizeLog2);
1514 __ Daddu(a0, fp, a0);
1515 // Adjust for return address and receiver.
1516 __ Daddu(a0, a0, Operand(2 * kPointerSize));
1517 // Compute copy end address. Also adjust for return address.
1518 __ Daddu(a7, fp, kPointerSize);
1519
1520 // Copy the arguments (including the receiver) to the new stack frame.
1521 // a0: copy start address
1522 // a1: function
1523 // a2: expected number of arguments
1524 // a3: code entry to call
1525 // a7: copy end address
1526 Label copy;
1527 __ bind(&copy);
1528 __ ld(a4, MemOperand(a0)); // Adjusted above for return addr and receiver.
1529 __ Dsubu(sp, sp, kPointerSize);
1530 __ Dsubu(a0, a0, kPointerSize);
1531 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a7));
1532 __ sd(a4, MemOperand(sp)); // In the delay slot.
1533
1534 // Fill the remaining expected arguments with undefined.
1535 // a1: function
1536 // a2: expected number of arguments
1537 // a3: code entry to call
1538 __ LoadRoot(a4, Heap::kUndefinedValueRootIndex);
1539 __ dsll(a6, a2, kPointerSizeLog2);
1540 __ Dsubu(a2, fp, Operand(a6));
1541 // Adjust for frame.
1542 __ Dsubu(a2, a2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
1543 2 * kPointerSize));
1544
1545 Label fill;
1546 __ bind(&fill);
1547 __ Dsubu(sp, sp, kPointerSize);
1548 __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
1549 __ sd(a4, MemOperand(sp));
1550 }
1551
1552 // Call the entry point.
1553 __ bind(&invoke);
1554
1555 __ Call(a3);
1556
1557 // Store offset of return address for deoptimizer.
1558 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1559
1560 // Exit frame and return.
1561 LeaveArgumentsAdaptorFrame(masm);
1562 __ Ret();
1563
1564
1565 // -------------------------------------------
1566 // Don't adapt arguments.
1567 // -------------------------------------------
1568 __ bind(&dont_adapt_arguments);
1569 __ Jump(a3);
1570
1571 __ bind(&stack_overflow);
1572 {
1573 FrameScope frame(masm, StackFrame::MANUAL);
1574 EnterArgumentsAdaptorFrame(masm);
1575 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
1576 __ break_(0xCC);
1577 }
1578}
1579
1580
1581#undef __
1582
1583} } // namespace v8::internal
1584
1585#endif // V8_TARGET_ARCH_MIPS64