// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS64

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
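
// The `__` shorthand is V8's convention for emitting code through the
// MacroAssembler passed in as |masm|: each `__ foo(...)` below expands to
// masm->foo(...), so the builtin generators read like annotated assembly.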


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : target
  //  -- a3                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(a1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // Insert extra arguments.
  int num_extra_args = 0;
  switch (extra_args) {
    case BuiltinExtraArguments::kTarget:
      __ Push(a1);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kNewTarget:
      __ Push(a3);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kTargetAndNewTarget:
      __ Push(a1, a3);
      num_extra_args += 2;
      break;
    case BuiltinExtraArguments::kNone:
      break;
  }

  // JumpToExternalReference expects a0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Daddu(a0, a0, num_extra_args + 1);

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, a4);
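    // SmiTst leaves (value & kSmiTagMask) in a4; zero would mean a2 holds a
    // smi, so the assert below demands a heap object (the initial map).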
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              a4, Operand(zero_reg));
    __ GetObjectType(a2, a3, a4);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              a4, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, a4);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              a4, Operand(zero_reg));
    __ GetObjectType(a2, a3, a4);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              a4, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ mov(a3, a1);
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments
  //  -- ra                 : return address
  //  -- sp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- sp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Condition const cc = (kind == MathMaxMinKind::kMin) ? ge : le;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? f2 : f0;
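
  // For Math.min the accumulator starts at +Infinity and is replaced whenever
  // it compares >= the next argument (cc == ge); Math.max mirrors this with
  // -Infinity and <=. |reg| selects whose sign bit decides the -0 vs. +0 tie
  // below: the incoming argument (f2) for min, the accumulator (f0) for max.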

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in a1 and the double value in f0.
  __ LoadRoot(a1, root_index);
  __ ldc1(f0, FieldMemOperand(a1, HeapNumber::kValueOffset));
  __ mov(a3, a0);

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ Dsubu(a0, a0, Operand(1));
    __ Branch(&done_loop, lt, a0, Operand(zero_reg));

    // Load the next parameter tagged value into a2.
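    // Dlsa computes at = sp + (a0 << kPointerSizeLog2), i.e. the stack slot
    // holding argument a0 (the arguments still sit above sp at this point).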
    __ Dlsa(at, sp, a0, kPointerSizeLog2);
    __ ld(a2, MemOperand(at));

    // Load the double value of the parameter into f2, maybe converting the
    // parameter to a number first using the ToNumberStub if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(a2, &convert_smi);
    __ ld(a4, FieldMemOperand(a2, HeapObject::kMapOffset));
    __ JumpIfRoot(a4, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumberStub to convert it.
      FrameScope scope(masm, StackFrame::INTERNAL);
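      // The raw word counts in a0/a3 must be smi-tagged before they are
      // spilled: the stub call below can trigger a GC, which scans this
      // frame and must see valid tagged values in every slot.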
      __ SmiTag(a0);
      __ SmiTag(a3);
      __ Push(a0, a1, a3);
      __ mov(a0, a2);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(a2, v0);
      __ Pop(a0, a1, a3);
      {
        // Restore the double accumulator value (f0).
        Label restore_smi, done_restore;
        __ JumpIfSmi(a1, &restore_smi);
        __ ldc1(f0, FieldMemOperand(a1, HeapNumber::kValueOffset));
        __ jmp(&done_restore);
        __ bind(&restore_smi);
        __ SmiToDoubleFPURegister(a1, f0, a4);
        __ bind(&done_restore);
      }
      __ SmiUntag(a3);
      __ SmiUntag(a0);
    }
    __ jmp(&convert);
    __ bind(&convert_number);
    __ ldc1(f2, FieldMemOperand(a2, HeapNumber::kValueOffset));
    __ jmp(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDoubleFPURegister(a2, f2, a4);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left
    // hand side (f0) and the next parameter value on the right hand side (f2).
    Label compare_equal, compare_nan, compare_swap;
    __ BranchF(&compare_equal, &compare_nan, eq, f0, f2);
    __ BranchF(&compare_swap, nullptr, cc, f0, f2);
    __ Branch(&loop);

    // Left and right hand side are equal, check for -0 vs. +0.
    __ bind(&compare_equal);
    __ FmoveHigh(a4, reg);
    // Make a4 unsigned.
    __ dsll32(a4, a4, 0);
    __ Branch(&loop, ne, a4, Operand(0x8000000000000000));
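    // Since the operands compared equal, they can differ only as +0 vs. -0,
    // so only the high word matters: a4 == 0x8000000000000000 exactly when
    // the checked operand is -0. In that case fall through to the swap so
    // that min prefers -0 and max prefers +0; otherwise keep the accumulator.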

    // Result is on the right hand side.
    __ bind(&compare_swap);
    __ mov_d(f0, f2);
    __ mov(a1, a2);
    __ jmp(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(a1, Heap::kNanValueRootIndex);
    __ ldc1(f0, FieldMemOperand(a1, HeapNumber::kValueOffset));
    __ jmp(&loop);
  }

  __ bind(&done_loop);
  __ Dlsa(sp, sp, a3, kPointerSizeLog2);
  __ mov(v0, a1);
  __ DropAndRet(1);
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(a0, a0, Operand(1));
    __ Dlsa(sp, sp, a0, kPointerSizeLog2);
    __ ld(a0, MemOperand(sp));
    __ Drop(2);
  }

  // 2a. Convert first argument to number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(v0, Smi::FromInt(0));
  __ DropAndRet(1);
}


void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(a0, a0, Operand(1));
    __ Dlsa(sp, sp, a0, kPointerSizeLog2);
    __ ld(a0, MemOperand(sp));
    __ Drop(2);
    __ jmp(&done);
    __ bind(&no_arguments);
    __ Move(a0, Smi::FromInt(0));
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure a0 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(a0, &done_convert);
    __ GetObjectType(a0, a2, a2);
    __ Branch(&done_convert, eq, a2, Operand(HEAP_NUMBER_TYPE));
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(a1, a3);
      ToNumberStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Move(a0, v0);
      __ Pop(a1, a3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(v0, a1, a0, a2, t0, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a0);
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(a0);
  }
  __ Ret(USE_DELAY_SLOT);
  __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset));  // In delay slot.
}


// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(a0, a0, Operand(1));
    __ Dlsa(sp, sp, a0, kPointerSizeLog2);
    __ ld(a0, MemOperand(sp));
    __ Drop(2);
  }

  // 2a. At least one argument, return a0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(a0, &to_string);
    __ GetObjectType(a0, a1, a1);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ Subu(a1, a1, Operand(FIRST_NONSTRING_TYPE));
    __ Branch(&symbol_descriptive_string, eq, a1, Operand(zero_reg));
    __ Branch(&to_string, gt, a1, Operand(zero_reg));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(v0, Heap::kempty_stringRootIndex);
    __ DropAndRet(1);
  }

  // 3a. Convert a0 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in a0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(a0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}


void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(a0, a0, Operand(1));
    __ Dlsa(sp, sp, a0, kPointerSizeLog2);
    __ ld(a0, MemOperand(sp));
    __ Drop(2);
    __ jmp(&done);
    __ bind(&no_arguments);
    __ LoadRoot(a0, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure a0 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(a0, &convert);
    __ GetObjectType(a0, a2, a2);
    __ And(t0, a2, Operand(kIsNotStringMask));
    __ Branch(&done_convert, eq, t0, Operand(zero_reg));
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(a1, a3);
      __ CallStub(&stub);
      __ Move(a0, v0);
      __ Pop(a1, a3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(v0, a1, a0, a2, t0, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a0);
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(a0);
  }
  __ Ret(USE_DELAY_SLOT);
  __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset));  // In delay slot.
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ ld(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
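  // a2 now holds a tagged Code object pointer; the first instruction lives
  // Code::kHeaderSize bytes past the object start, so adding
  // (kHeaderSize - kHeapObjectTag) strips the tag and skips the header at once.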
  __ Daddu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (preserved for callee)
  //  -- a1 : target function (preserved for callee)
  //  -- a3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target, plus the target
    // function again as an argument to the runtime call.
    __ SmiTag(a0);
    __ Push(a0, a1, a3, a1);

    __ CallRuntime(function_id, 1);
    // Restore target function and new target.
    __ Pop(a0, a1, a3);
    __ SmiUntag(a0);
  }

  __ Daddu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(a4, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(a4));

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a2     : allocation site or undefined
  //  -- a3     : new target
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(a2, t0);
    __ SmiTag(a0);
    __ Push(a2, a0);

    if (create_implicit_receiver) {
      __ Push(a1, a3);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(t0, v0);
      __ Pop(a1, a3);

      // ----------- S t a t e -------------
      //  -- a1: constructor function
      //  -- a3: new target
      //  -- t0: newly allocated object
      // -----------------------------------
      __ ld(a0, MemOperand(sp));
    }
    __ SmiUntag(a0);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(t0, t0);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ Daddu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
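    // kCallerSPOffset spans the saved fp and return address that sit above
    // fp, so a2 now equals the caller's stack pointer, i.e. the address of
    // the last argument the caller pushed.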

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: new target
    // t0: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ mov(t0, a0);
    __ jmp(&entry);
    __ bind(&loop);
    __ Dlsa(a4, a2, t0, kPointerSizeLog2);
    __ ld(a5, MemOperand(a4));
    __ push(a5);
    __ bind(&entry);
    __ Daddu(t0, t0, Operand(-1));
    __ Branch(&loop, greater_equal, t0, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    // a3: new target
    if (is_api_function) {
      __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, a3, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(v0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ GetObjectType(v0, a1, a3);
      __ Branch(&exit, greater_equal, a3, Operand(FIRST_JS_RECEIVER_TYPE));

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ ld(v0, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ ld(a1, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ ld(a1, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(v0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ SmiScale(a4, a1, kPointerSizeLog2);
  __ Daddu(sp, sp, a4);
  __ Daddu(sp, sp, kPointerSize);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  }
  __ Ret();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}


void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(a1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}


enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers a2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
  // Make a2 the space we have left. The stack might already be overflowed
  // here which will cause a2 to become negative.
  __ dsubu(a2, sp, a2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiScale(a7, v0, kPointerSizeLog2);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ dsll(a7, argc, kPointerSizeLog2);
  }
  __ Branch(&okay, gt, a2, Operand(a7));  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: new.target
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);
  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, zero_reg);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ li(cp, Operand(context_address));
    __ ld(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Check if we have enough stack space to push all arguments.
    // Clobbers a2.
    Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt);

    // Remember new.target.
    __ mov(a5, a0);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ Dlsa(a6, s0, a3, kPointerSizeLog2);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // a6 points past last arg.
    __ bind(&loop);
    __ ld(a4, MemOperand(s0));  // Read next parameter.
    __ daddiu(s0, s0, kPointerSize);
    __ ld(a4, MemOperand(a4));  // Dereference handle.
    __ push(a4);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(a6));

    // Set up new.target and argc.
    __ mov(a0, a3);
    __ mov(a3, a5);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(a4, Heap::kUndefinedValueRootIndex);
    __ mov(s1, a4);
    __ mov(s2, a4);
    __ mov(s3, a4);
    __ mov(s4, a4);
    __ mov(s5, a4);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ Jump(ra);
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o a1: the JS function object being called.
//   o a3: the new target
//   o cp: our context
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);

  __ Push(ra, fp, cp, a1);
  __ Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ ld(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  Label load_debug_bytecode_array, bytecode_array_loaded;
  Register debug_info = kInterpreterBytecodeArrayRegister;
  DCHECK(!debug_info.is(a0));
  __ ld(debug_info, FieldMemOperand(a0, SharedFunctionInfo::kDebugInfoOffset));
  __ Branch(&load_debug_bytecode_array, ne, debug_info,
            Operand(DebugInfo::uninitialized()));
  __ ld(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister, a4);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, a4, a4);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Push new.target, bytecode array and zero for bytecode array offset.
  __ Push(a3, kInterpreterBytecodeArrayRegister, zero_reg);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ Dsubu(a5, sp, Operand(a4));
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    __ Branch(&ok, hs, a5, Operand(a2));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
    __ Branch(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(a5);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ Dsubu(a4, a4, Operand(kPointerSize));
    __ Branch(&loop_header, ge, a4, Operand(zero_reg));
  }

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Code aging of the BytecodeArray object.

  // Load bytecode offset and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ Daddu(kInterpreterRegisterFileRegister, fp,
           Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ li(kInterpreterDispatchTableRegister,
        Operand(ExternalReference::interpreter_dispatch_table_address(
            masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ Daddu(a0, kInterpreterBytecodeArrayRegister,
           kInterpreterBytecodeOffsetRegister);
  __ lbu(a0, MemOperand(a0));
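  // The bytecode value just loaded indexes the dispatch table directly:
  // the handler for this bytecode lives at table[bytecode].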
  __ Dlsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2);
  __ ld(at, MemOperand(at));
  // TODO(rmcilroy): Make dispatch table point to code entries to avoid
  // untagging and header removal.
  __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(at);

  // Even though the first bytecode handler was called, we will never return.
  __ Abort(kUnexpectedReturnFromBytecodeHandler);

  // Load debug copy of the bytecode array.
  __ bind(&load_debug_bytecode_array);
  __ ld(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ Branch(&bytecode_array_loaded);
}


void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in accumulator, which is already in v0.

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments and return.
  __ lw(at, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                            BytecodeArray::kParameterSizeOffset));
  __ Daddu(sp, sp, at);
  __ Jump(ra);
}


// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------

  // Find the address of the last argument.
  __ Daddu(a3, a0, Operand(1));  // Add one for receiver.
  __ dsll(a3, a3, kPointerSizeLog2);
  __ Dsubu(a3, a2, Operand(a3));
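  // a3 now points one slot below the last value to copy; the loop below walks
  // a2 downwards and pushes each slot while a2 > a3, so the receiver slot is
  // included in the a0 + 1 values pushed.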

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ ld(t0, MemOperand(a2));
  __ Daddu(a2, a2, Operand(-kPointerSize));
  __ push(t0);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(a3));

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}


// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (not including receiver)
  //  -- a3 : new target
  //  -- a1 : constructor to call
  //  -- a2 : address of the first argument
  // -----------------------------------

  // Find the address of the last argument.
  __ dsll(t0, a0, kPointerSizeLog2);
  __ Dsubu(t0, a2, Operand(t0));

  // Push a slot for the receiver.
  __ push(zero_reg);

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ ld(t1, MemOperand(a2));
  __ Daddu(a2, a2, Operand(-kPointerSize));
  __ push(t1);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(t0));

  // Call the constructor with a0, a1, and a3 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}


static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) {
  // Initialize register file register and dispatch table register.
  __ Daddu(kInterpreterRegisterFileRegister, fp,
           Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ li(kInterpreterDispatchTableRegister,
        Operand(ExternalReference::interpreter_dispatch_table_address(
            masm->isolate())));

  // Get the context from the frame.
  __ ld(kContextRegister,
        MemOperand(kInterpreterRegisterFileRegister,
                   InterpreterFrameConstants::kContextFromRegisterPointer));

  // Get the bytecode array pointer from the frame.
  __ ld(
      kInterpreterBytecodeArrayRegister,
      MemOperand(kInterpreterRegisterFileRegister,
                 InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister, at);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Get the target bytecode offset from the frame.
  __ ld(kInterpreterBytecodeOffsetRegister,
        MemOperand(
            kInterpreterRegisterFileRegister,
            InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ Daddu(a1, kInterpreterBytecodeArrayRegister,
           kInterpreterBytecodeOffsetRegister);
  __ lbu(a1, MemOperand(a1));
  __ Dlsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2);
  __ ld(a1, MemOperand(a1));
  __ Daddu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(a1);
}


static void Generate_InterpreterNotifyDeoptimizedHelper(
    MacroAssembler* masm, Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ li(a1, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a1);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // Drop state (we don't use these for interpreter deopts) and pop the
  // accumulator value into the accumulator register.
  __ Drop(1);
  __ Pop(kInterpreterAccumulatorRegister);

  // Enter the bytecode dispatch.
  Generate_EnterBytecodeDispatch(masm);
}


void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the address of the interpreter entry trampoline as a return address.
  // This simulates the initial call to bytecode handlers in interpreter entry
  // trampoline. The return will never actually be taken, but our stack walker
  // uses this address to determine whether a frame is interpreted.
  __ li(ra, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));

  Generate_EnterBytecodeDispatch(masm);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Dsubu(a0, a0,
           Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  //   a3 - new target
  RegList saved_regs =
      (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  __ Jump(a0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Dsubu(a0, a0,
           Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  //   a3 - new target
  RegList saved_regs =
      (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);

  // Perform prologue operations usually performed by the young code stub.
  __ Push(ra, fp, cp, a1);
  __ Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ Daddu(a0, a0, Operand(kNoCodeAgeSequenceLength));
  __ Jump(a0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ Daddu(sp, sp, Operand(kPointerSize));  // Ignore state.
  __ Jump(ra);  // Jump to miss handler.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a0);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> a6.
  __ ld(a6, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(a6);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register,
            ne, a6, Operand(FullCodeGenerator::NO_REGISTERS));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot: Addu will emit one instruction.
  __ Daddu(sp, sp, Operand(1 * kPointerSize));  // Remove state.

  __ bind(&with_tos_register);
  __ ld(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, a6, Operand(FullCodeGenerator::TOS_REG));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot: Addu will emit one instruction.
  __ Daddu(sp, sp, Operand(2 * kPointerSize));  // Remove state.

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


// Clobbers {t2, t3, a4, a5}.
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Label* receiver_check_failed) {
  Register signature = t2;
  Register map = t3;
  Register constructor = a4;
  Register scratch = a5;

  // If there is no signature, return the holder.
  __ ld(signature, FieldMemOperand(function_template_info,
                                   FunctionTemplateInfo::kSignatureOffset));
  Label receiver_check_passed;
  __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
                &receiver_check_passed);

  // Walk the prototype chain.
  __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, scratch, scratch);
  Label next_prototype;
  __ Branch(&next_prototype, ne, scratch, Operand(JS_FUNCTION_TYPE));
  Register type = constructor;
  __ ld(type,
        FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ ld(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ Branch(&receiver_check_passed, eq, signature, Operand(type),
            USE_DELAY_SLOT);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ GetObjectType(type, scratch, scratch);
  __ Branch(&next_prototype, ne, scratch, Operand(FUNCTION_TEMPLATE_INFO_TYPE));

  // Otherwise load the parent function template and iterate.
  __ ld(type,
        FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ Branch(&function_template_loop);

  // Load the next prototype.
  __ bind(&next_prototype);
  __ lwu(scratch, FieldMemOperand(map, Map::kBitField3Offset));
  __ DecodeField<Map::HasHiddenPrototype>(scratch);
  __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg));
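  // Only hidden prototypes are transparently skipped here: once an ordinary
  // prototype is reached the signature can no longer match, so the walk ends
  // with a receiver check failure.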
Ben Murdoch097c5b22016-05-18 11:27:45 +01001351
1352 __ ld(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
1353 __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001354 // Iterate.
1355 __ Branch(&prototype_loop_start);
1356
1357 __ bind(&receiver_check_passed);
1358}


void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : number of arguments excluding receiver
  //  -- a1 : callee
  //  -- ra : return address
  //  -- sp[0] : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument
  //  -- sp[8 * argc] : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ ld(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ ld(t1, FieldMemOperand(t1, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ Dlsa(t8, sp, a0, kPointerSizeLog2);
  __ ld(t0, MemOperand(t8));
  CompatibleReceiverCheck(masm, t0, t1, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ ld(t2, FieldMemOperand(t1, FunctionTemplateInfo::kCallCodeOffset));
  __ ld(t2, FieldMemOperand(t2, CallHandlerInfo::kFastHandlerOffset));
  __ Daddu(t2, t2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(t2);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver).
  __ Daddu(t8, t8, Operand(kPointerSize));
  __ daddu(sp, t8, zero_reg);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Look up the function in the JavaScript frame.
  __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ ld(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ daddu(v0, v0, a1);
  __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}
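
// A hedged sketch of the OSR entry computation above, in pseudocode
// (the accessor names are illustrative, not the real V8 API):
//
//   deopt_data = code->deoptimization_data();                 // a FixedArray
//   osr_offset = Smi::ToInt(deopt_data[kOsrPcOffsetIndex]);
//   entry      = code_address + Code::kHeaderSize + osr_offset;
//
// The builtin loads |entry| into ra and executes Ret(), so control "returns"
// straight into the optimized code at the OSR entry point.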


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as an indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(at, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(at));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}


// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- sp[0] : receiver
  // -----------------------------------

  // 1. Pop receiver into a0 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(a0);
    __ JumpIfSmi(a0, &receiver_not_date);
    __ GetObjectType(a0, t0, t0);
    __ Branch(&receiver_not_date, ne, t0, Operand(JS_DATE_TYPE));
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    __ Ret(USE_DELAY_SLOT);
    __ ld(v0, FieldMemOperand(a0, JSDate::kValueOffset));  // In delay slot.
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      Label stamp_mismatch;
      __ li(a1, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
      __ ld(a1, MemOperand(a1));
      __ ld(t0, FieldMemOperand(a0, JSDate::kCacheStampOffset));
      __ Branch(&stamp_mismatch, ne, t0, Operand(a1));
      __ Ret(USE_DELAY_SLOT);
      __ ld(v0, FieldMemOperand(
                    a0, JSDate::kValueOffset +
                            field_index * kPointerSize));  // In delay slot.
      __ bind(&stamp_mismatch);
    }
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, t0);
    __ li(a1, Operand(Smi::FromInt(field_index)));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  __ TailCallRuntime(Runtime::kThrowNotDateError);
}
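
// A hedged sketch of the caching protocol above: cached date fields are only
// valid while the isolate-wide date cache stamp still matches the stamp
// recorded in the JSDate object. The accessor names below are illustrative:
//
//   Object* GetField(JSDate* date, int field) {
//     if (field == kDateValue) return date->value();
//     if (field < kFirstUncachedField &&
//         date->cache_stamp() == isolate->date_cache_stamp()) {
//       return date->cached_field(field);      // Fast path, no runtime call.
//     }
//     return JSDate::GetField(date, field);    // C++ slow path (CallCFunction).
//   }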


// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : argc
  //  -- sp[0] : argArray
  //  -- sp[8] : thisArg
  //  -- sp[16] : receiver
  // -----------------------------------

  // 1. Load receiver into a1, argArray into a0 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg;
    Register scratch = a4;
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ mov(a3, a2);
    // Dlsa() cannot be used here, because the scratch value is needed later.
    __ dsll(scratch, a0, kPointerSizeLog2);
    __ Daddu(a0, sp, Operand(scratch));
    __ ld(a1, MemOperand(a0));  // receiver
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ ld(a2, MemOperand(a0));  // thisArg
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ ld(a3, MemOperand(a0));  // argArray
    __ bind(&no_arg);
    __ Daddu(sp, sp, Operand(scratch));
    __ sd(a2, MemOperand(sp));
    __ mov(a0, a3);
  }

  // ----------- S t a t e -------------
  //  -- a0 : argArray
  //  -- a1 : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(a1, &receiver_not_callable);
  __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
  __ And(a4, a4, Operand(1 << Map::kIsCallable));
  __ Branch(&receiver_not_callable, eq, a4, Operand(zero_reg));

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ mov(a0, zero_reg);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ sd(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
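
// In pseudocode, a hedged sketch of what the builtin above implements
// (ES6 19.2.3.1, Function.prototype.apply):
//
//   // receiver.apply(thisArg, argArray)
//   if (!IsCallable(receiver)) throw TypeError;
//   if (argArray is null or undefined)
//     return Call(receiver, thisArg);                         // no arguments
//   return Apply(receiver, thisArg,
//                CreateListFromArrayLike(argArray));          // Apply builtin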


// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  {
    Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ Daddu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack.
  // a0: actual number of arguments
  __ Dlsa(at, sp, a0, kPointerSizeLog2);
  __ ld(a1, MemOperand(at));

  // 3. Shift arguments one slot down on the stack (overwriting the original
  // receiver). Adjust the argument count to make the original first argument
  // the new receiver.
  // a0: actual number of arguments
  // a1: function
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ Dlsa(a2, sp, a0, kPointerSizeLog2);

    __ bind(&loop);
    __ ld(at, MemOperand(a2, -kPointerSize));
    __ sd(at, MemOperand(a2));
    __ Dsubu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Dsubu(a0, a0, Operand(1));
    __ Pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
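
// A hedged sketch of the receiver shuffle above. For f.call(r, x1, ..., xN)
// the stack initially holds the function itself in the receiver slot:
//
//   before:  sp[0] = xN, ..., sp[8*(N-1)] = x1, sp[8*N] = r, sp[8*(N+1)] = f
//   after:   sp[0] = xN, ..., sp[8*(N-1)] = x1, sp[8*N] = r       (argc = N)
//
// i.e. every slot moves one position toward higher addresses, f is
// overwritten by r, the duplicated element left at the top is popped, and r
// becomes the receiver of the tail-called Call builtin.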


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : argc
  //  -- sp[0] : argumentsList
  //  -- sp[8] : thisArgument
  //  -- sp[16] : target
  //  -- sp[24] : receiver
  // -----------------------------------

  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label no_arg;
    Register scratch = a4;
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ mov(a2, a1);
    __ mov(a3, a1);
    __ dsll(scratch, a0, kPointerSizeLog2);
    __ mov(a0, scratch);
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(zero_reg));
    __ Daddu(a0, sp, Operand(a0));
    __ ld(a1, MemOperand(a0));  // target
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ ld(a2, MemOperand(a0));  // thisArgument
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ ld(a3, MemOperand(a0));  // argumentsList
    __ bind(&no_arg);
    __ Daddu(sp, sp, Operand(scratch));
    __ sd(a2, MemOperand(sp));
    __ mov(a0, a3);
  }

  // ----------- S t a t e -------------
  //  -- a0 : argumentsList
  //  -- a1 : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(a1, &target_not_callable);
  __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
  __ And(a4, a4, Operand(1 << Map::kIsCallable));
  __ Branch(&target_not_callable, eq, a4, Operand(zero_reg));

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ sd(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : argc
  //  -- sp[0] : new.target (optional)
  //  -- sp[8] : argumentsList
  //  -- sp[16] : target
  //  -- sp[24] : receiver
  // -----------------------------------

  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
  // new.target into a3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label no_arg;
    Register scratch = a4;
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ mov(a2, a1);
    // Dlsa() cannot be used here, because the scratch value is needed later.
    __ dsll(scratch, a0, kPointerSizeLog2);
    __ Daddu(a0, sp, Operand(scratch));
    __ sd(a2, MemOperand(a0));  // receiver
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ ld(a1, MemOperand(a0));  // target
    __ mov(a3, a1);  // new.target defaults to target
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ ld(a2, MemOperand(a0));  // argumentsList
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ ld(a3, MemOperand(a0));  // new.target
    __ bind(&no_arg);
    __ Daddu(sp, sp, Operand(scratch));
    __ mov(a0, a2);
  }

  // ----------- S t a t e -------------
  //  -- a0 : argumentsList
  //  -- a3 : new.target
  //  -- a1 : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(a1, &target_not_constructor);
  __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
  __ And(a4, a4, Operand(1 << Map::kIsConstructor));
  __ Branch(&target_not_constructor, eq, a4, Operand(zero_reg));

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(a3, &new_target_not_constructor);
  __ ld(a4, FieldMemOperand(a3, HeapObject::kMapOffset));
  __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
  __ And(a4, a4, Operand(1 << Map::kIsConstructor));
  __ Branch(&new_target_not_constructor, eq, a4, Operand(zero_reg));

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ sd(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ sd(a3, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
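
// In pseudocode, a hedged sketch of the two Reflect builtins above
// (ES6 26.1.1 and 26.1.2):
//
//   // Reflect.apply(target, thisArgument, argumentsList)
//   if (!IsCallable(target)) throw TypeError;
//   return Apply(target, thisArgument,
//                CreateListFromArrayLike(argumentsList));
//
//   // Reflect.construct(target, argumentsList, newTarget = target)
//   if (!IsConstructor(target) || !IsConstructor(newTarget)) throw TypeError;
//   return Construct(target, CreateListFromArrayLike(argumentsList),
//                    newTarget);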


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- a0 : actual number of arguments
  //  -- a1 : function (passed through to callee)
  //  -- a2 : expected number of arguments
  //  -- a3 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(a5, Heap::kRealStackLimitRootIndex);
  // Make a5 the space we have left. The stack might already be overflowed
  // here which will cause a5 to become negative.
  __ dsubu(a5, sp, a5);
  // Check if the arguments will overflow the stack.
  __ dsll(at, a2, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, a5, Operand(at));
}
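
// A hedged sketch of the check above, as plain arithmetic:
//
//   space_left = sp - real_stack_limit;        // may already be negative
//   needed     = expected_argc * kPointerSize;
//   if (space_left <= needed) goto stack_overflow;   // signed comparison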


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Smi-tag the argument count (a 64-bit smi keeps its payload in the upper
  // 32 bits, hence the shift by 32).
  __ dsll32(a0, a0, 0);
  __ li(a4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit());
  __ Daddu(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ld(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                             kPointerSize)));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  __ SmiScale(a4, a1, kPointerSizeLog2);
  __ Daddu(sp, sp, a4);
  // Adjust for the receiver.
  __ Daddu(sp, sp, Operand(kPointerSize));
}


// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : argumentsList
  //  -- a1 : target
  //  -- a3 : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(a0, &create_runtime);

    // Load the map of argumentsList into a2.
    __ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset));

    // Load native context into a4.
    __ ld(a4, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ ld(at, ContextMemOperand(a4, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ Branch(&create_arguments, eq, a2, Operand(at));
    __ ld(at, ContextMemOperand(a4, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ Branch(&create_arguments, eq, a2, Operand(at));

    // Check if argumentsList is a fast JSArray.
    __ lbu(v0, FieldMemOperand(a2, Map::kInstanceTypeOffset));
    __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE));

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(a1, a3, a0);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ mov(a0, v0);
      __ Pop(a1, a3);
      __ ld(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
      __ SmiUntag(a2);
    }
    __ Branch(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ ld(a2, FieldMemOperand(a0, JSArgumentsObject::kLengthOffset));
    __ ld(a4, FieldMemOperand(a0, JSObject::kElementsOffset));
    __ ld(at, FieldMemOperand(a4, FixedArray::kLengthOffset));
    __ Branch(&create_runtime, ne, a2, Operand(at));
    __ SmiUntag(a2);
    __ mov(a0, a4);
    __ Branch(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ lbu(a2, FieldMemOperand(a2, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(a2);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS));
    __ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ ld(a2, FieldMemOperand(a0, JSArray::kLengthOffset));
    __ ld(a0, FieldMemOperand(a0, JSArray::kElementsOffset));
    __ SmiUntag(a2);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(a4, Heap::kRealStackLimitRootIndex);
    // Make a4 the space we have left. The stack might already be overflowed
    // here which will cause a4 to become negative.
    __ Dsubu(a4, sp, a4);
    // Check if the arguments will overflow the stack.
    __ dsll(at, a2, kPointerSizeLog2);
    __ Branch(&done, gt, a4, Operand(at));  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- a1 : target
  //  -- a0 : args (a FixedArray built from argumentsList)
  //  -- a2 : len (number of elements to push from args)
  //  -- a3 : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    __ mov(a4, zero_reg);
    Label done, loop;
    __ bind(&loop);
    __ Branch(&done, eq, a4, Operand(a2));
    __ Dlsa(at, a0, a4, kPointerSizeLog2);
    __ ld(at, FieldMemOperand(at, FixedArray::kHeaderSize));
    __ Push(at);
    __ Daddu(a4, a4, Operand(1));
    __ Branch(&loop);
    __ bind(&done);
    __ Move(a0, a4);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    Label construct;
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(&construct, ne, a3, Operand(at));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
    __ bind(&construct);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
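
// A hedged sketch of the argumentsList handling above, in pseudocode:
//
//   if (args has the unmodified sloppy or strict arguments map &&
//       args.length == args.elements.length) {
//     list = args.elements;                       // reuse the backing store
//   } else if (args is a JSArray with packed SMI or object elements) {
//     list = args.elements;
//   } else {
//     list = %CreateListFromArrayLike(args);      // runtime fallback
//   }
//   // Each element of |list| is then pushed, and control tail-calls either
//   // Call (new.target == undefined) or Construct.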

namespace {

// Drops the top JavaScript frame and an arguments adaptor frame below it (if
// present), preserving all the arguments prepared for the current call.
// Does nothing if the debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// | ...
// | g()'s arg M
// | ...
// | g()'s arg 1
// | g()'s receiver arg
// | g()'s caller pc
// ------- g()'s frame: -------
// | g()'s caller fp <- fp
// | g()'s context
// | function pointer: g
// | -------------------------
// | ...
// | ...
// | f()'s arg N
// | ...
// | f()'s arg 1
// | f()'s receiver arg <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if the debugger is not active.
  Label done;
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(masm->isolate());
  __ li(at, Operand(debug_is_active));
  __ lb(scratch1, MemOperand(at));
  __ Branch(&done, ne, scratch1, Operand(zero_reg));

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ ld(scratch3, MemOperand(fp, StandardFrameConstants::kMarkerOffset));
    __ Branch(&no_interpreter_frame, ne, scratch3,
              Operand(Smi::FromInt(StackFrame::STUB)));
    __ ld(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ ld(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ld(scratch3, MemOperand(scratch2, StandardFrameConstants::kContextOffset));
  __ Branch(&no_arguments_adaptor, ne, scratch3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Drop arguments adaptor frame and load arguments count.
  __ mov(fp, scratch2);
  __ ld(scratch1,
        MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(scratch1);
  __ Branch(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count.
  __ ld(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ld(scratch1,
        FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(scratch1,
        FieldMemOperand(scratch1,
                        SharedFunctionInfo::kFormalParameterCountOffset));

  __ bind(&formal_parameter_count_loaded);

  // Calculate the end of destination area where we will put the arguments
  // after we drop current frame. We add kPointerSize to count the receiver
  // argument which is not included into formal parameters count.
  Register dst_reg = scratch2;
  __ Dlsa(dst_reg, fp, scratch1, kPointerSizeLog2);
  __ Daddu(dst_reg, dst_reg,
           Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize));

  Register src_reg = scratch1;
  __ Dlsa(src_reg, sp, args_reg, kPointerSizeLog2);
  // Count receiver argument as well (not included in args_reg).
  __ Daddu(src_reg, src_reg, Operand(kPointerSize));

  if (FLAG_debug_code) {
    __ Check(lo, kStackAccessBelowStackPointer, src_reg, Operand(dst_reg));
  }

  // Restore caller's frame pointer and return address now as they will be
  // overwritten by the copying loop.
  __ ld(ra, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
  __ ld(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Now copy callee arguments to the caller frame going backwards to avoid
  // callee arguments corruption (source and destination areas could overlap).

  // Both src_reg and dst_reg are pointing to the word after the one to copy,
  // so they must be pre-decremented in the loop.
  Register tmp_reg = scratch3;
  Label loop, entry;
  __ Branch(&entry);
  __ bind(&loop);
  __ Dsubu(src_reg, src_reg, Operand(kPointerSize));
  __ Dsubu(dst_reg, dst_reg, Operand(kPointerSize));
  __ ld(tmp_reg, MemOperand(src_reg));
  __ sd(tmp_reg, MemOperand(dst_reg));
  __ bind(&entry);
  __ Branch(&loop, ne, sp, Operand(src_reg));

  // Leave current frame.
  __ mov(sp, dst_reg);

  __ bind(&done);
}
}  // namespace

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(a1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that function is not a "classConstructor".
  Label class_constructor;
  __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset));
  __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ Branch(&class_constructor, ne, at, Operand(zero_reg));

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
  __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                         (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ Branch(&done_convert, ne, at, Operand(zero_reg));
  {
    // ----------- S t a t e -------------
    //  -- a0 : the number of arguments (not including the receiver)
    //  -- a1 : the function to call (checked to be a JSFunction)
    //  -- a2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(a3);
    } else {
      Label convert_to_object, convert_receiver;
      __ Dlsa(at, sp, a0, kPointerSizeLog2);
      __ ld(a3, MemOperand(at));
      __ JumpIfSmi(a3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ GetObjectType(a3, a4, a4);
      __ Branch(&done_convert, hs, a4, Operand(FIRST_JS_RECEIVER_TYPE));
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(a3);
        }
        __ Branch(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(a0);
        __ Push(a0, a1);
        __ mov(a0, a3);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ mov(a3, v0);
        __ Pop(a0, a1);
        __ SmiUntag(a0);
      }
      __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ Dlsa(at, sp, a0, kPointerSizeLog2);
    __ sd(a3, MemOperand(at));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  //  -- a2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  __ lw(a2,
        FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(a0);
  ParameterCount expected(a2);
  __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
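
// A hedged sketch of the receiver conversion performed above (ES6 9.2.1):
//
//   if (function is strict or native) {
//     // Leave the receiver untouched.
//   } else if (receiver == null || receiver == undefined) {
//     receiver = global_proxy(function's native context);
//   } else if (!IsJSReceiver(receiver)) {
//     receiver = ToObject(receiver);   // e.g. wraps smis and strings
//   }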


// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  // Patch the receiver to [[BoundThis]].
  {
    __ ld(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
    __ Dlsa(a4, sp, a0, kPointerSizeLog2);
    __ sd(at, MemOperand(a4));
  }

  // Load [[BoundArguments]] into a2 and length of that into a4.
  __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(a4);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- a4 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ dsll(a5, a4, kPointerSizeLog2);
    __ Dsubu(sp, sp, Operand(a5));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Daddu(sp, sp, Operand(a5));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(a5, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, gt, a5, Operand(a0));
    __ Dlsa(a6, sp, a4, kPointerSizeLog2);
    __ ld(at, MemOperand(a6));
    __ Dlsa(a6, sp, a5, kPointerSizeLog2);
    __ sd(at, MemOperand(a6));
    __ Daddu(a4, a4, Operand(1));
    __ Daddu(a5, a5, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(a4);
    __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Dsubu(a4, a4, Operand(1));
    __ Branch(&done_loop, lt, a4, Operand(zero_reg));
    __ Dlsa(a5, a2, a4, kPointerSizeLog2);
    __ ld(at, MemOperand(a5));
    __ Dlsa(a5, sp, a0, kPointerSizeLog2);
    __ sd(at, MemOperand(a5));
    __ Daddu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                      masm->isolate())));
  __ ld(at, MemOperand(at));
  __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
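
// A hedged sketch of the bound-function call above: for
//   f = g.bind(b, a1, ..., aK);  f(x1, ..., xN);
// the builtin patches the receiver to b, shifts the N pending arguments down
// to make room, copies the K [[BoundArguments]] into the gap, and then
// tail-calls the Call builtin so that the effective call is
//   g.call(b, a1, ..., aK, x1, ..., xN);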


// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(a1, &non_callable);
  __ bind(&non_smi);
  __ GetObjectType(a1, t1, t2);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Check if target has a [[Call]] internal method.
  __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t1, t1, Operand(1 << Map::kIsCallable));
  __ Branch(&non_callable, eq, t1, Operand(zero_reg));

  __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE));

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(a1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ Daddu(a0, a0, 2);
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ Dlsa(at, sp, a0, kPointerSizeLog2);
  __ sd(a1, MemOperand(at));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
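
// Dispatch performed by the Call builtin above, as a hedged sketch:
//
//   switch (instance_type(target)) {
//     case JS_FUNCTION_TYPE:       goto CallFunction;         // fast path
//     case JS_BOUND_FUNCTION_TYPE: goto CallBoundFunction;
//     case JS_PROXY_TYPE:          goto %JSProxyCall;         // runtime
//     default:
//       if (!map(target)->is_callable()) throw TypeError;
//       // e.g. callable API objects: route through the delegate.
//       goto CallFunction(native_context[CALL_AS_FUNCTION_DELEGATE_INDEX]);
//   }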


void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(a1);

  // The calling convention for function-specific ConstructStubs requires
  // a2 to contain either an AllocationSite or undefined.
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset));
  __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  // Load [[BoundArguments]] into a2 and length of that into a4.
  __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(a4);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- a3 : the new target (checked to be a constructor)
  //  -- a4 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ dsll(a5, a4, kPointerSizeLog2);
    __ Dsubu(sp, sp, Operand(a5));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Daddu(sp, sp, Operand(a5));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(a5, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, ge, a5, Operand(a0));
    __ Dlsa(a6, sp, a4, kPointerSizeLog2);
    __ ld(at, MemOperand(a6));
    __ Dlsa(a6, sp, a5, kPointerSizeLog2);
    __ sd(at, MemOperand(a6));
    __ Daddu(a4, a4, Operand(1));
    __ Daddu(a5, a5, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(a4);
    __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Dsubu(a4, a4, Operand(1));
    __ Branch(&done_loop, lt, a4, Operand(zero_reg));
    __ Dlsa(a5, a2, a4, kPointerSizeLog2);
    __ ld(at, MemOperand(a5));
    __ Dlsa(a5, sp, a0, kPointerSizeLog2);
    __ sd(at, MemOperand(a5));
    __ Daddu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label skip_load;
    __ Branch(&skip_load, ne, a1, Operand(a3));
    __ ld(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&skip_load);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ ld(at, MemOperand(at));
  __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSProxy)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(a1, a3);
  // Include the pushed new_target, constructor and the receiver.
  __ Daddu(a0, a0, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}


// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (can be any Object)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(a1, &non_constructor);

  // Dispatch based on instance type.
  __ ld(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));

  // Check if target has a [[Construct]] internal method.
  __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t3, t3, Operand(1 << Map::kIsConstructor));
  __ Branch(&non_constructor, eq, t3, Operand(zero_reg));

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Only dispatch to proxies after checking whether they are constructors.
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq, t2, Operand(JS_PROXY_TYPE));

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ Dlsa(at, sp, a0, kPointerSizeLog2);
    __ sd(a1, MemOperand(at));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
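
// Dispatch performed by the Construct builtin above, as a hedged sketch:
//
//   if (target is a Smi) throw TypeError;            // not constructable
//   if (instance_type(target) == JS_FUNCTION_TYPE)
//     goto function-specific construct stub;
//   if (!map(target)->is_constructor()) throw TypeError;
//   switch (instance_type(target)) {
//     case JS_BOUND_FUNCTION_TYPE: goto ConstructBoundFunction;
//     case JS_PROXY_TYPE:          goto %JSProxyConstruct;     // runtime
//     default:  // exotic object with a [[Construct]] internal method
//       goto CallFunction(native_context[CALL_AS_CONSTRUCTOR_DELEGATE_INDEX]);
//   }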


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  //  -- a3: new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Branch(&dont_adapt_arguments, eq,
            a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless, as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into a4.
    __ SmiScale(a0, a0, kPointerSizeLog2);
    __ Daddu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Daddu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ dsll(a4, a2, kPointerSizeLog2);
    __ dsubu(a4, a0, a4);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // a4: copy end address

    Label copy;
    __ bind(&copy);
    __ ld(a5, MemOperand(a0));
    __ push(a5);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a4));
    __ daddiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: actual < expected.
    __ bind(&too_few);

    // If the function is strong we need to throw an error.
    Label no_strong_error;
    __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lbu(a5, FieldMemOperand(a4, SharedFunctionInfo::kStrongModeByteOffset));
    __ And(a5, a5, Operand(1 << SharedFunctionInfo::kStrongModeBitWithinByte));
    __ Branch(&no_strong_error, eq, a5, Operand(zero_reg));

    // What we really care about is the required number of arguments.
    DCHECK_EQ(kPointerSize, kInt64Size);
    __ lw(a5, FieldMemOperand(a4, SharedFunctionInfo::kLengthOffset));
    __ srl(a5, a5, 1);
    __ Branch(&no_strong_error, ge, a0, Operand(a5));

    {
      FrameScope frame(masm, StackFrame::MANUAL);
      EnterArgumentsAdaptorFrame(masm);
      __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments);
    }

    __ bind(&no_strong_error);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into a7.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ SmiScale(a0, a0, kPointerSizeLog2);
    __ Daddu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Daddu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Daddu(a7, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // a7: copy end address
    Label copy;
    __ bind(&copy);
    __ ld(a4, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Dsubu(sp, sp, kPointerSize);
    __ Dsubu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a7));
    __ sd(a4, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
    __ dsll(a6, a2, kPointerSizeLog2);
    __ Dsubu(a4, fp, Operand(a6));
    // Adjust for frame.
    __ Dsubu(a4, a4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                             2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ Dsubu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a4));
    __ sd(a5, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mov(a0, a2);
  // a0: expected number of arguments
  // a1: function (passed through to callee)
  // a3: new target (passed through to callee)
  __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Call(a4);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Jump(a4);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ break_(0xCC);
  }
}
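
// A hedged sketch of the adaptation above, with actual count N and expected
// count M:
//
//   if (M == kDontAdaptArgumentsSentinel) jump code_entry;   // no adaptation
//   enter adaptor frame;
//   if (N >= M)  copy the top M arguments plus the receiver into the frame;
//   else         copy all N arguments plus the receiver, then push undefined
//                M - N times;
//   a0 = M; call code_entry; leave adaptor frame; return;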


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64