blob: 76d0640bc9a303003be8dd0bbd44514a35f37fec [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Andrei Popescu31002712010-02-23 13:46:05 +00004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_MIPS
Leon Clarkef7060e22010-06-03 12:02:55 +01006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#include "src/codegen.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008#include "src/debug/debug.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00009#include "src/deoptimizer.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010#include "src/full-codegen/full-codegen.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040011#include "src/runtime/runtime.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000012
Andrei Popescu31002712010-02-23 13:46:05 +000013
14namespace v8 {
15namespace internal {
16
17
18#define __ ACCESS_MASM(masm)
19
20
21void Builtins::Generate_Adaptor(MacroAssembler* masm,
22 CFunctionId id,
23 BuiltinExtraArguments extra_args) {
Ben Murdoch257744e2011-11-30 15:57:28 +000024 // ----------- S t a t e -------------
25 // -- a0 : number of arguments excluding receiver
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000026 // -- a1 : target
27 // -- a3 : new.target
Ben Murdoch257744e2011-11-30 15:57:28 +000028 // -- sp[0] : last argument
29 // -- ...
30 // -- sp[4 * (argc - 1)] : first argument
31 // -- sp[4 * agrc] : receiver
32 // -----------------------------------
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000033 __ AssertFunction(a1);
34
35 // Make sure we operate in the context of the called function (for example
36 // ConstructStubs implemented in C++ will be run in the context of the caller
37 // instead of the callee, due to the way that [[Construct]] is defined for
38 // ordinary functions).
39 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +000040
41 // Insert extra arguments.
42 int num_extra_args = 0;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000043 switch (extra_args) {
44 case BuiltinExtraArguments::kTarget:
45 __ Push(a1);
46 ++num_extra_args;
47 break;
48 case BuiltinExtraArguments::kNewTarget:
49 __ Push(a3);
50 ++num_extra_args;
51 break;
52 case BuiltinExtraArguments::kTargetAndNewTarget:
53 __ Push(a1, a3);
54 num_extra_args += 2;
55 break;
56 case BuiltinExtraArguments::kNone:
57 break;
Ben Murdoch257744e2011-11-30 15:57:28 +000058 }
59
Emily Bernierd0a1eb72015-03-24 16:35:39 -040060 // JumpToExternalReference expects a0 to contain the number of arguments
Ben Murdoch257744e2011-11-30 15:57:28 +000061 // including the receiver and the extra arguments.
Emily Bernierd0a1eb72015-03-24 16:35:39 -040062 __ Addu(a0, a0, num_extra_args + 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000063
Ben Murdoch257744e2011-11-30 15:57:28 +000064 __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
65}
66
67
Ben Murdoch3ef787d2012-04-12 10:51:47 +010068// Load the built-in InternalArray function from the current context.
69static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
70 Register result) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000071 // Load the InternalArray function from the native context.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000072 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
Ben Murdoch3ef787d2012-04-12 10:51:47 +010073}
74
75
Ben Murdoch257744e2011-11-30 15:57:28 +000076// Load the built-in Array function from the current context.
77static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000078 // Load the Array function from the native context.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000079 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
Ben Murdoch257744e2011-11-30 15:57:28 +000080}
81
82
Ben Murdoch3ef787d2012-04-12 10:51:47 +010083void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
84 // ----------- S t a t e -------------
85 // -- a0 : number of arguments
86 // -- ra : return address
87 // -- sp[...]: constructor arguments
88 // -----------------------------------
89 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
90
91 // Get the InternalArray function.
92 GenerateLoadInternalArrayFunction(masm, a1);
93
94 if (FLAG_debug_code) {
95 // Initial map for the builtin InternalArray functions should be maps.
96 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +000097 __ SmiTst(a2, t0);
98 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
Ben Murdoch3ef787d2012-04-12 10:51:47 +010099 t0, Operand(zero_reg));
100 __ GetObjectType(a2, a3, t0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000101 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100102 t0, Operand(MAP_TYPE));
103 }
104
105 // Run the native code for the InternalArray function called as a normal
106 // function.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000107 // Tail call a stub.
108 InternalArrayConstructorStub stub(masm->isolate());
109 __ TailCallStub(&stub);
Andrei Popescu31002712010-02-23 13:46:05 +0000110}
111
112
113void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000114 // ----------- S t a t e -------------
115 // -- a0 : number of arguments
116 // -- ra : return address
117 // -- sp[...]: constructor arguments
118 // -----------------------------------
119 Label generic_array_code;
120
121 // Get the Array function.
122 GenerateLoadArrayFunction(masm, a1);
123
124 if (FLAG_debug_code) {
125 // Initial map for the builtin Array functions should be maps.
126 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000127 __ SmiTst(a2, t0);
128 __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
Ben Murdoch257744e2011-11-30 15:57:28 +0000129 t0, Operand(zero_reg));
130 __ GetObjectType(a2, a3, t0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000131 __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
Ben Murdoch257744e2011-11-30 15:57:28 +0000132 t0, Operand(MAP_TYPE));
133 }
134
135 // Run the native code for the Array function called as a normal function.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000136 // Tail call a stub.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000137 __ mov(a3, a1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000138 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
139 ArrayConstructorStub stub(masm->isolate());
140 __ TailCallStub(&stub);
Andrei Popescu31002712010-02-23 13:46:05 +0000141}
142
143
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000144// static
Ben Murdoch097c5b22016-05-18 11:27:45 +0100145void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
146 // ----------- S t a t e -------------
147 // -- a0 : number of arguments
148 // -- ra : return address
149 // -- sp[(argc - n) * 8] : arg[n] (zero-based)
150 // -- sp[(argc + 1) * 8] : receiver
151 // -----------------------------------
Ben Murdoch097c5b22016-05-18 11:27:45 +0100152 Heap::RootListIndex const root_index =
153 (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
154 : Heap::kMinusInfinityValueRootIndex;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100155
156 // Load the accumulator with the default return value (either -Infinity or
157 // +Infinity), with the tagged value in a1 and the double value in f0.
158 __ LoadRoot(a1, root_index);
159 __ ldc1(f0, FieldMemOperand(a1, HeapNumber::kValueOffset));
Ben Murdochda12d292016-06-02 14:46:10 +0100160 __ Addu(a3, a0, Operand(1));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100161
162 Label done_loop, loop;
163 __ bind(&loop);
164 {
165 // Check if all parameters done.
166 __ Subu(a0, a0, Operand(1));
167 __ Branch(&done_loop, lt, a0, Operand(zero_reg));
168
169 // Load the next parameter tagged value into a2.
170 __ Lsa(at, sp, a0, kPointerSizeLog2);
171 __ lw(a2, MemOperand(at));
172
173 // Load the double value of the parameter into f2, maybe converting the
174 // parameter to a number first using the ToNumberStub if necessary.
175 Label convert, convert_smi, convert_number, done_convert;
176 __ bind(&convert);
177 __ JumpIfSmi(a2, &convert_smi);
178 __ lw(t0, FieldMemOperand(a2, HeapObject::kMapOffset));
179 __ JumpIfRoot(t0, Heap::kHeapNumberMapRootIndex, &convert_number);
180 {
181 // Parameter is not a Number, use the ToNumberStub to convert it.
182 FrameScope scope(masm, StackFrame::INTERNAL);
183 __ SmiTag(a0);
184 __ SmiTag(a3);
185 __ Push(a0, a1, a3);
186 __ mov(a0, a2);
187 ToNumberStub stub(masm->isolate());
188 __ CallStub(&stub);
189 __ mov(a2, v0);
190 __ Pop(a0, a1, a3);
191 {
192 // Restore the double accumulator value (f0).
193 Label restore_smi, done_restore;
194 __ JumpIfSmi(a1, &restore_smi);
195 __ ldc1(f0, FieldMemOperand(a1, HeapNumber::kValueOffset));
196 __ jmp(&done_restore);
197 __ bind(&restore_smi);
198 __ SmiToDoubleFPURegister(a1, f0, t0);
199 __ bind(&done_restore);
200 }
201 __ SmiUntag(a3);
202 __ SmiUntag(a0);
203 }
204 __ jmp(&convert);
205 __ bind(&convert_number);
206 __ ldc1(f2, FieldMemOperand(a2, HeapNumber::kValueOffset));
207 __ jmp(&done_convert);
208 __ bind(&convert_smi);
209 __ SmiToDoubleFPURegister(a2, f2, t0);
210 __ bind(&done_convert);
211
Ben Murdochda12d292016-06-02 14:46:10 +0100212 // Perform the actual comparison with using Min/Max macro instructions the
213 // accumulator value on the left hand side (f0) and the next parameter value
214 // on the right hand side (f2).
215 // We need to work out which HeapNumber (or smi) the result came from.
216 Label compare_nan, set_value;
217 __ BranchF(nullptr, &compare_nan, eq, f0, f2);
218 __ Move(t0, t1, f0);
219 if (kind == MathMaxMinKind::kMin) {
220 __ MinNaNCheck_d(f0, f0, f2);
221 } else {
222 DCHECK(kind == MathMaxMinKind::kMax);
223 __ MaxNaNCheck_d(f0, f0, f2);
224 }
225 __ Move(at, t8, f0);
226 __ Branch(&set_value, ne, t0, Operand(at));
227 __ Branch(&set_value, ne, t1, Operand(t8));
228 __ jmp(&loop);
229 __ bind(&set_value);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100230 __ mov(a1, a2);
231 __ jmp(&loop);
232
233 // At least one side is NaN, which means that the result will be NaN too.
234 __ bind(&compare_nan);
235 __ LoadRoot(a1, Heap::kNanValueRootIndex);
236 __ ldc1(f0, FieldMemOperand(a1, HeapNumber::kValueOffset));
237 __ jmp(&loop);
238 }
239
240 __ bind(&done_loop);
241 __ Lsa(sp, sp, a3, kPointerSizeLog2);
Ben Murdochda12d292016-06-02 14:46:10 +0100242 __ Ret(USE_DELAY_SLOT);
243 __ mov(v0, a1); // In delay slot.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100244}
245
246// static
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000247void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000248 // ----------- S t a t e -------------
249 // -- a0 : number of arguments
250 // -- a1 : constructor function
251 // -- ra : return address
252 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
253 // -- sp[argc * 4] : receiver
254 // -----------------------------------
Ben Murdoch257744e2011-11-30 15:57:28 +0000255
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000256 // 1. Load the first argument into a0 and get rid of the rest (including the
257 // receiver).
258 Label no_arguments;
259 {
260 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
261 __ Subu(a0, a0, Operand(1));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100262 __ Lsa(sp, sp, a0, kPointerSizeLog2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000263 __ lw(a0, MemOperand(sp));
264 __ Drop(2);
Ben Murdoch257744e2011-11-30 15:57:28 +0000265 }
266
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000267 // 2a. Convert first argument to number.
268 ToNumberStub stub(masm->isolate());
269 __ TailCallStub(&stub);
Ben Murdoch257744e2011-11-30 15:57:28 +0000270
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000271 // 2b. No arguments, return +0.
272 __ bind(&no_arguments);
273 __ Move(v0, Smi::FromInt(0));
274 __ DropAndRet(1);
275}
Ben Murdoch257744e2011-11-30 15:57:28 +0000276
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000277
278// static
279void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000280 // ----------- S t a t e -------------
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000281 // -- a0 : number of arguments
282 // -- a1 : constructor function
283 // -- a3 : new target
284 // -- ra : return address
285 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
286 // -- sp[argc * 4] : receiver
Ben Murdoch257744e2011-11-30 15:57:28 +0000287 // -----------------------------------
288
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000289 // 1. Make sure we operate in the context of the called function.
290 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +0000291
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000292 // 2. Load the first argument into a0 and get rid of the rest (including the
293 // receiver).
294 {
295 Label no_arguments, done;
296 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
297 __ Subu(a0, a0, Operand(1));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100298 __ Lsa(sp, sp, a0, kPointerSizeLog2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000299 __ lw(a0, MemOperand(sp));
300 __ Drop(2);
301 __ jmp(&done);
302 __ bind(&no_arguments);
303 __ Move(a0, Smi::FromInt(0));
304 __ Drop(1);
305 __ bind(&done);
Ben Murdoch257744e2011-11-30 15:57:28 +0000306 }
Ben Murdoch257744e2011-11-30 15:57:28 +0000307
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000308 // 3. Make sure a0 is a number.
309 {
310 Label done_convert;
311 __ JumpIfSmi(a0, &done_convert);
312 __ GetObjectType(a0, a2, a2);
313 __ Branch(&done_convert, eq, a2, Operand(HEAP_NUMBER_TYPE));
314 {
315 FrameScope scope(masm, StackFrame::INTERNAL);
316 __ Push(a1, a3);
317 ToNumberStub stub(masm->isolate());
318 __ CallStub(&stub);
319 __ Move(a0, v0);
320 __ Pop(a1, a3);
321 }
322 __ bind(&done_convert);
323 }
Ben Murdoch257744e2011-11-30 15:57:28 +0000324
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000325 // 4. Check if new target and constructor differ.
326 Label new_object;
327 __ Branch(&new_object, ne, a1, Operand(a3));
Ben Murdoch257744e2011-11-30 15:57:28 +0000328
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000329 // 5. Allocate a JSValue wrapper for the number.
330 __ AllocateJSValue(v0, a1, a0, a2, t0, &new_object);
Ben Murdoch257744e2011-11-30 15:57:28 +0000331 __ Ret();
332
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000333 // 6. Fallback to the runtime to create new object.
334 __ bind(&new_object);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100335 {
336 FrameScope scope(masm, StackFrame::INTERNAL);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100337 __ Push(a0); // first argument
338 FastNewObjectStub stub(masm->isolate());
339 __ CallStub(&stub);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000340 __ Pop(a0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100341 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000342 __ Ret(USE_DELAY_SLOT);
343 __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset)); // In delay slot
344}
Ben Murdoch257744e2011-11-30 15:57:28 +0000345
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000346
347// static
348void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
349 // ----------- S t a t e -------------
350 // -- a0 : number of arguments
351 // -- a1 : constructor function
352 // -- ra : return address
353 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
354 // -- sp[argc * 4] : receiver
355 // -----------------------------------
356
357 // 1. Load the first argument into a0 and get rid of the rest (including the
358 // receiver).
359 Label no_arguments;
360 {
361 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
362 __ Subu(a0, a0, Operand(1));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100363 __ Lsa(sp, sp, a0, kPointerSizeLog2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000364 __ lw(a0, MemOperand(sp));
365 __ Drop(2);
366 }
367
368 // 2a. At least one argument, return a0 if it's a string, otherwise
369 // dispatch to appropriate conversion.
370 Label to_string, symbol_descriptive_string;
371 {
372 __ JumpIfSmi(a0, &to_string);
373 __ GetObjectType(a0, a1, a1);
374 STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
375 __ Subu(a1, a1, Operand(FIRST_NONSTRING_TYPE));
376 __ Branch(&symbol_descriptive_string, eq, a1, Operand(zero_reg));
377 __ Branch(&to_string, gt, a1, Operand(zero_reg));
378 __ Ret(USE_DELAY_SLOT);
379 __ mov(v0, a0);
380 }
381
382 // 2b. No arguments, return the empty string (and pop the receiver).
Ben Murdoch257744e2011-11-30 15:57:28 +0000383 __ bind(&no_arguments);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000384 {
385 __ LoadRoot(v0, Heap::kempty_stringRootIndex);
386 __ DropAndRet(1);
387 }
Ben Murdoch257744e2011-11-30 15:57:28 +0000388
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000389 // 3a. Convert a0 to a string.
390 __ bind(&to_string);
391 {
392 ToStringStub stub(masm->isolate());
393 __ TailCallStub(&stub);
394 }
395
396 // 3b. Convert symbol in a0 to a string.
397 __ bind(&symbol_descriptive_string);
398 {
399 __ Push(a0);
400 __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
401 }
402}
403
404
405// static
406void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
407 // ----------- S t a t e -------------
408 // -- a0 : number of arguments
409 // -- a1 : constructor function
410 // -- a3 : new target
411 // -- ra : return address
412 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
413 // -- sp[argc * 4] : receiver
414 // -----------------------------------
415
416 // 1. Make sure we operate in the context of the called function.
417 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
418
419 // 2. Load the first argument into a0 and get rid of the rest (including the
420 // receiver).
421 {
422 Label no_arguments, done;
423 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
424 __ Subu(a0, a0, Operand(1));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100425 __ Lsa(sp, sp, a0, kPointerSizeLog2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000426 __ lw(a0, MemOperand(sp));
427 __ Drop(2);
428 __ jmp(&done);
429 __ bind(&no_arguments);
430 __ LoadRoot(a0, Heap::kempty_stringRootIndex);
431 __ Drop(1);
432 __ bind(&done);
433 }
434
435 // 3. Make sure a0 is a string.
436 {
437 Label convert, done_convert;
438 __ JumpIfSmi(a0, &convert);
439 __ GetObjectType(a0, a2, a2);
440 __ And(t0, a2, Operand(kIsNotStringMask));
441 __ Branch(&done_convert, eq, t0, Operand(zero_reg));
442 __ bind(&convert);
443 {
444 FrameScope scope(masm, StackFrame::INTERNAL);
445 ToStringStub stub(masm->isolate());
446 __ Push(a1, a3);
447 __ CallStub(&stub);
448 __ Move(a0, v0);
449 __ Pop(a1, a3);
450 }
451 __ bind(&done_convert);
452 }
453
454 // 4. Check if new target and constructor differ.
455 Label new_object;
456 __ Branch(&new_object, ne, a1, Operand(a3));
457
458 // 5. Allocate a JSValue wrapper for the string.
459 __ AllocateJSValue(v0, a1, a0, a2, t0, &new_object);
460 __ Ret();
461
462 // 6. Fallback to the runtime to create new object.
463 __ bind(&new_object);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100464 {
465 FrameScope scope(masm, StackFrame::INTERNAL);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100466 __ Push(a0); // first argument
467 FastNewObjectStub stub(masm->isolate());
468 __ CallStub(&stub);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000469 __ Pop(a0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100470 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000471 __ Ret(USE_DELAY_SLOT);
472 __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset)); // In delay slot
Steve Block44f0eee2011-05-26 01:26:41 +0100473}
474
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000475static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
476 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
477 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
478 __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
479 __ Jump(at);
480}
481
Ben Murdoch097c5b22016-05-18 11:27:45 +0100482static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
483 Runtime::FunctionId function_id) {
484 // ----------- S t a t e -------------
485 // -- a0 : argument count (preserved for callee)
486 // -- a1 : target function (preserved for callee)
487 // -- a3 : new target (preserved for callee)
488 // -----------------------------------
489 {
490 FrameScope scope(masm, StackFrame::INTERNAL);
491 // Push a copy of the target function and the new target.
492 // Push function as parameter to the runtime call.
493 __ SmiTag(a0);
494 __ Push(a0, a1, a3, a1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000495
Ben Murdoch097c5b22016-05-18 11:27:45 +0100496 __ CallRuntime(function_id, 1);
497
498 // Restore target function and new target.
499 __ Pop(a0, a1, a3);
500 __ SmiUntag(a0);
501 }
502
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000503 __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
504 __ Jump(at);
505}
506
507
508void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
509 // Checking whether the queued function is ready for install is optional,
510 // since we come across interrupts and stack checks elsewhere. However,
511 // not checking may delay installing ready functions, and always checking
512 // would be quite expensive. A good compromise is to first check against
513 // stack limit as a cue for an interrupt signal.
514 Label ok;
515 __ LoadRoot(t0, Heap::kStackLimitRootIndex);
516 __ Branch(&ok, hs, sp, Operand(t0));
517
Ben Murdoch097c5b22016-05-18 11:27:45 +0100518 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000519
520 __ bind(&ok);
521 GenerateTailCallToSharedCode(masm);
522}
523
524
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100525static void Generate_JSConstructStubHelper(MacroAssembler* masm,
526 bool is_api_function,
Ben Murdoch097c5b22016-05-18 11:27:45 +0100527 bool create_implicit_receiver,
528 bool check_derived_construct) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000529 // ----------- S t a t e -------------
530 // -- a0 : number of arguments
531 // -- a1 : constructor function
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000532 // -- a2 : allocation site or undefined
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000533 // -- a3 : new target
Ben Murdochda12d292016-06-02 14:46:10 +0100534 // -- cp : context
Ben Murdoch257744e2011-11-30 15:57:28 +0000535 // -- ra : return address
536 // -- sp[...]: constructor arguments
537 // -----------------------------------
538
Ben Murdoch257744e2011-11-30 15:57:28 +0000539 Isolate* isolate = masm->isolate();
540
Ben Murdoch257744e2011-11-30 15:57:28 +0000541 // Enter a construct frame.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100542 {
543 FrameScope scope(masm, StackFrame::CONSTRUCT);
Ben Murdoch257744e2011-11-30 15:57:28 +0000544
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000545 // Preserve the incoming parameters on the stack.
546 __ AssertUndefinedOrAllocationSite(a2, t0);
547 __ SmiTag(a0);
Ben Murdochda12d292016-06-02 14:46:10 +0100548 __ Push(cp, a2, a0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000549
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000550 if (create_implicit_receiver) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100551 // Allocate the new receiver object.
552 __ Push(a1, a3);
553 FastNewObjectStub stub(masm->isolate());
554 __ CallStub(&stub);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000555 __ mov(t4, v0);
556 __ Pop(a1, a3);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100557
Ben Murdoch097c5b22016-05-18 11:27:45 +0100558 // ----------- S t a t e -------------
559 // -- a1: constructor function
560 // -- a3: new target
561 // -- t0: newly allocated object
562 // -----------------------------------
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100563
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000564 // Retrieve smi-tagged arguments count from the stack.
565 __ lw(a0, MemOperand(sp));
Ben Murdoch257744e2011-11-30 15:57:28 +0000566 }
567
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000568 __ SmiUntag(a0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000569
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000570 if (create_implicit_receiver) {
571 // Push the allocated receiver to the stack. We need two copies
572 // because we may have to return the original one and the calling
573 // conventions dictate that the called function pops the receiver.
574 __ Push(t4, t4);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000575 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000576 __ PushRoot(Heap::kTheHoleValueRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000577 }
Ben Murdoch257744e2011-11-30 15:57:28 +0000578
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100579 // Set up pointer to last argument.
580 __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +0000581
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100582 // Copy arguments and receiver to the expression stack.
583 // a0: number of arguments
584 // a1: constructor function
585 // a2: address of last argument (caller sp)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000586 // a3: new target
587 // t4: number of arguments (smi-tagged)
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100588 // sp[0]: receiver
589 // sp[1]: receiver
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000590 // sp[2]: number of arguments (smi-tagged)
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100591 Label loop, entry;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000592 __ SmiTag(t4, a0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100593 __ jmp(&entry);
594 __ bind(&loop);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100595 __ Lsa(t0, a2, t4, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100596 __ lw(t1, MemOperand(t0));
597 __ push(t1);
598 __ bind(&entry);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000599 __ Addu(t4, t4, Operand(-2));
600 __ Branch(&loop, greater_equal, t4, Operand(zero_reg));
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000601
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100602 // Call the function.
603 // a0: number of arguments
604 // a1: constructor function
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000605 // a3: new target
Ben Murdochc5610432016-08-08 18:44:38 +0100606 ParameterCount actual(a0);
607 __ InvokeFunction(a1, a3, actual, CALL_FUNCTION,
608 CheckDebugStepCallWrapper());
Ben Murdoch257744e2011-11-30 15:57:28 +0000609
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100610 // Store offset of return address for deoptimizer.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000611 if (create_implicit_receiver && !is_api_function) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100612 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
613 }
Ben Murdoch257744e2011-11-30 15:57:28 +0000614
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100615 // Restore context from the frame.
Ben Murdochda12d292016-06-02 14:46:10 +0100616 __ lw(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +0000617
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000618 if (create_implicit_receiver) {
619 // If the result is an object (in the ECMA sense), we should get rid
620 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
621 // on page 74.
622 Label use_receiver, exit;
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100623
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000624 // If the result is a smi, it is *not* an object in the ECMA sense.
625 // v0: result
626 // sp[0]: receiver (newly allocated object)
627 // sp[1]: number of arguments (smi-tagged)
628 __ JumpIfSmi(v0, &use_receiver);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100629
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000630 // If the type of the result (stored in its map) is less than
631 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
632 __ GetObjectType(v0, a1, a3);
633 __ Branch(&exit, greater_equal, a3, Operand(FIRST_JS_RECEIVER_TYPE));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100634
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000635 // Throw away the result of the constructor invocation and use the
636 // on-stack receiver as the result.
637 __ bind(&use_receiver);
638 __ lw(v0, MemOperand(sp));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100639
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000640 // Remove receiver from the stack, remove caller arguments, and
641 // return.
642 __ bind(&exit);
643 // v0: result
644 // sp[0]: receiver (newly allocated object)
645 // sp[1]: number of arguments (smi-tagged)
646 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
647 } else {
648 __ lw(a1, MemOperand(sp));
649 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100650
651 // Leave construct frame.
Ben Murdoch257744e2011-11-30 15:57:28 +0000652 }
653
Ben Murdoch097c5b22016-05-18 11:27:45 +0100654 // ES6 9.2.2. Step 13+
655 // Check that the result is not a Smi, indicating that the constructor result
656 // from a derived class is neither undefined nor an Object.
657 if (check_derived_construct) {
658 Label dont_throw;
659 __ JumpIfNotSmi(v0, &dont_throw);
660 {
661 FrameScope scope(masm, StackFrame::INTERNAL);
662 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
663 }
664 __ bind(&dont_throw);
665 }
666
667 __ Lsa(sp, sp, a1, kPointerSizeLog2 - 1);
Ben Murdoch257744e2011-11-30 15:57:28 +0000668 __ Addu(sp, sp, kPointerSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000669 if (create_implicit_receiver) {
670 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
671 }
Ben Murdoch257744e2011-11-30 15:57:28 +0000672 __ Ret();
Andrei Popescu31002712010-02-23 13:46:05 +0000673}
674
675
676void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100677 Generate_JSConstructStubHelper(masm, false, true, false);
Andrei Popescu31002712010-02-23 13:46:05 +0000678}
679
680
681void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100682 Generate_JSConstructStubHelper(masm, true, false, false);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000683}
684
685
686void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100687 Generate_JSConstructStubHelper(masm, false, false, false);
688}
689
690
691void Builtins::Generate_JSBuiltinsConstructStubForDerived(
692 MacroAssembler* masm) {
693 Generate_JSConstructStubHelper(masm, false, false, true);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000694}
695
696
697void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
698 FrameScope scope(masm, StackFrame::INTERNAL);
699 __ Push(a1);
700 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
701}
702
703
704enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
705
706
707// Clobbers a2; preserves all other registers.
708static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
709 IsTagged argc_is_tagged) {
710 // Check the stack for overflow. We are not trying to catch
711 // interruptions (e.g. debug break and preemption) here, so the "real stack
712 // limit" is checked.
713 Label okay;
714 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
715 // Make a2 the space we have left. The stack might already be overflowed
716 // here which will cause a2 to become negative.
717 __ Subu(a2, sp, a2);
718 // Check if the arguments will overflow the stack.
719 if (argc_is_tagged == kArgcIsSmiTagged) {
720 __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize);
721 } else {
722 DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
723 __ sll(t3, argc, kPointerSizeLog2);
724 }
725 // Signed comparison.
726 __ Branch(&okay, gt, a2, Operand(t3));
727
728 // Out of stack space.
729 __ CallRuntime(Runtime::kThrowStackOverflow);
730
731 __ bind(&okay);
Ben Murdoch257744e2011-11-30 15:57:28 +0000732}
733
734
// Common tail of the JS entry trampolines: sets up the caller context, copies
// the C-side argv (an array of handles) onto the JS stack, clears the JS
// callee-saved registers, and invokes either Call or Construct depending on
// is_construct.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  // -- a0: new.target
  // -- a1: function
  // -- a2: receiver_pointer
  // -- a3: argc
  // -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ li(cp, Operand(context_address));
    __ lw(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Check if we have enough stack space to push all arguments.
    // Clobbers a2.
    Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt);

    // Remember new.target (a0 is about to be reused as a scratch register).
    __ mov(t1, a0);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ Lsa(t2, s0, a3, kPointerSizeLog2);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));

    // Setup new.target and argc.
    __ mov(a0, a3);
    __ mov(a3, t1);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }

  __ Jump(ra);
}
810
811
// Entry trampoline for ordinary calls: shared helper with is_construct=false.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
815
816
// Entry trampoline for constructor calls: shared helper with is_construct=true.
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
820
// static
// Resumes a suspended generator: records the sent value and resume mode on
// the generator object, then dispatches either to the Ignition entry (new
// style, bytecode) or rebuilds the full-codegen frame (old style).
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- v0 : the value to pass to the generator
  // -- a1 : the JSGeneratorObject to resume
  // -- a2 : the resume mode (tagged)
  // -- ra : return address
  // -----------------------------------
  __ AssertGeneratorObject(a1);

  // Store input value into generator object.
  __ sw(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOffset));
  __ RecordWriteField(a1, JSGeneratorObject::kInputOffset, v0, a3,
                      kRAHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(masm->isolate());
  __ li(t1, Operand(step_in_enabled));
  __ lb(t1, MemOperand(t1));
  __ Branch(&skip_flooding, eq, t1, Operand(zero_reg));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a2, t0);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(a1, a2);
    // Reload the function; the runtime call may have triggered GC.
    __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
  }
  __ bind(&skip_flooding);

  // Push receiver.
  __ lw(t1, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ Push(t1);

  // ----------- S t a t e -------------
  // -- a1 : the JSGeneratorObject to resume
  // -- a2 : the resume mode (tagged)
  // -- t0 : generator function
  // -- cp : generator context
  // -- ra : return address
  // -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label done_loop, loop;
    __ bind(&loop);
    __ Subu(a3, a3, Operand(Smi::FromInt(1)));
    __ Branch(&done_loop, lt, a3, Operand(zero_reg));
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object: a BytecodeArray in the function
  // data field means an Ignition (new-style) generator.
  Label old_generator;
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset));
  __ GetObjectType(a3, a3, a3);
  __ Branch(&old_generator, ne, a3, Operand(BYTECODE_ARRAY_TYPE));

  // New-style (ignition/turbofan) generator object.
  {
    __ lw(a0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a0,
          FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
    __ SmiUntag(a0);
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(a3, a1);
    __ Move(a1, t0);
    __ lw(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
    __ Jump(a2);
  }

  // Old-style (full-codegen) generator object
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(ra, fp);
    __ Move(fp, sp);
    __ Push(cp, t0);

    // Restore the operand stack.
    __ lw(a0, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
    __ lw(a3, FieldMemOperand(a0, FixedArray::kLengthOffset));
    __ Addu(a0, a0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ Lsa(a3, a0, a3, kPointerSizeLog2 - 1);
    {
      Label done_loop, loop;
      __ bind(&loop);
      __ Branch(&done_loop, eq, a0, Operand(a3));
      __ lw(t1, MemOperand(a0));
      __ Push(t1);
      __ Branch(USE_DELAY_SLOT, &loop);
      __ addiu(a0, a0, kPointerSize);  // In delay slot.
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex);
    __ sw(t1, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));

    // Resume the generator function at the continuation.
    __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset));
    __ Addu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(a2);
    __ Addu(a3, a3, Operand(a2));
    __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ Move(v0, a1);  // Continuation expects generator object in v0.
    __ Jump(a3);
  }
}
Steve Block44f0eee2011-05-26 01:26:41 +0100954
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
// o a1: the JS function object being called.
// o a3: the new target
// o cp: our context
// o fp: the caller's frame pointer
// o sp: stack pointer
// o ra: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(a1);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ lw(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  Label load_debug_bytecode_array, bytecode_array_loaded;
  Register debug_info = kInterpreterBytecodeArrayRegister;
  DCHECK(!debug_info.is(a0));
  __ lw(debug_info, FieldMemOperand(a0, SharedFunctionInfo::kDebugInfoOffset));
  __ Branch(&load_debug_bytecode_array, ne, debug_info,
            Operand(DebugInfo::uninitialized()));
  __ lw(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  // Check function data field is actually a BytecodeArray object.
  Label bytecode_array_not_present;
  __ JumpIfRoot(kInterpreterBytecodeArrayRegister,
                Heap::kUndefinedValueRootIndex, &bytecode_array_not_present);
  if (FLAG_debug_code) {
    __ SmiTst(kInterpreterBytecodeArrayRegister, t0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, t0, t0);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Load initial bytecode offset.
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(t0, kInterpreterBytecodeOffsetRegister);
  __ Push(a3, kInterpreterBytecodeArrayRegister, t0);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ lw(t0, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ Subu(t1, sp, Operand(t0));
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    __ Branch(&ok, hs, t1, Operand(a2));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
    __ Branch(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(t1);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ Subu(t0, t0, Operand(kPointerSize));
    __ Branch(&loop_header, ge, t0, Operand(zero_reg));
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ li(kInterpreterDispatchTableRegister,
        Operand(ExternalReference::interpreter_dispatch_table_address(
            masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ Addu(a0, kInterpreterBytecodeArrayRegister,
          kInterpreterBytecodeOffsetRegister);
  __ lbu(a0, MemOperand(a0));
  __ Lsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2);
  __ lw(at, MemOperand(at));
  __ Call(at);
  // Record the PC the interpreter returns to, so bytecode dispatch can
  // re-enter here (see Generate_InterpreterEnterBytecodeDispatch).
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in v0.

  // Get the arguments + receiver count.
  __ lw(t0, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lw(t0, FieldMemOperand(t0, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments and return.
  __ Addu(sp, sp, t0);
  __ Jump(ra);

  // Load debug copy of the bytecode array.
  __ bind(&load_debug_bytecode_array);
  __ lw(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ Branch(&bytecode_array_loaded);

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ bind(&bytecode_array_not_present);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kCodeOffset));
  __ Addu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ sw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(a1, t0, t1);
  __ Jump(t0);
}
1087
// static
// Pushes a contiguous range of interpreter arguments (highest address first)
// onto the machine stack and tail-calls the generic Call builtin.
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  // -- a0 : the number of arguments (not including the receiver)
  // -- a2 : the address of the first argument to be pushed. Subsequent
  //         arguments should be consecutive above this, in the same order as
  //         they are to be pushed onto the stack.
  // -- a1 : the target to call (can be any Object).
  // -----------------------------------

  // Find the address of the last argument.
  __ Addu(a3, a0, Operand(1));  // Add one for receiver.
  __ sll(a3, a3, kPointerSizeLog2);
  __ Subu(a3, a2, Operand(a3));

  // Push the arguments, walking a2 downwards until it reaches a3.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ lw(t0, MemOperand(a2));
  __ Addu(a2, a2, Operand(-kPointerSize));
  __ push(t0);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(a3));

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}
1119
// static
// Pushes interpreter arguments plus a hole receiver slot onto the machine
// stack and tail-calls the generic Construct builtin.
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : argument count (not including receiver)
  // -- a3 : new target
  // -- a1 : constructor to call
  // -- a2 : address of the first argument
  // -----------------------------------

  // Find the address of the last argument.
  __ sll(t0, a0, kPointerSizeLog2);
  __ Subu(t0, a2, Operand(t0));

  // Push a slot for the receiver (filled in by the Construct builtin).
  __ push(zero_reg);

  // Push the arguments, walking a2 downwards until it reaches t0.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ lw(t1, MemOperand(a2));
  __ Addu(a2, a2, Operand(-kPointerSize));
  __ push(t1);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(t0));

  // Call the constructor with a0, a1, and a3 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
1149
// Re-enters bytecode execution in an existing interpreter frame: fakes a
// return address into the interpreter entry trampoline, then dispatches to
// the bytecode at the offset stored in the frame.
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));
  __ Addu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() +
                          Code::kHeaderSize - kHeapObjectTag));

  // Initialize the dispatch table register.
  __ li(kInterpreterDispatchTableRegister,
        Operand(ExternalReference::interpreter_dispatch_table_address(
            masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ lw(kInterpreterBytecodeArrayRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister, at);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Get the target bytecode offset from the frame.
  __ lw(kInterpreterBytecodeOffsetRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ Addu(a1, kInterpreterBytecodeArrayRegister,
          kInterpreterBytecodeOffsetRegister);
  __ lbu(a1, MemOperand(a1));
  __ Lsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2);
  __ lw(a1, MemOperand(a1));
  __ Jump(a1);
}
1192
// Lazy-compilation stub: before calling the runtime compiler, searches the
// SharedFunctionInfo's optimized code map for cached literals/code matching
// the current native context, installs them on the closure if found, and
// otherwise falls back to the shared code or Runtime::kCompileLazy.
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : argument count (preserved for callee)
  // -- a3 : new target (preserved for callee)
  // -- a1 : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime, gotta_call_runtime_no_stack;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register argument_count = a0;
  Register closure = a1;
  Register new_target = a3;
  // Save the registers the callee expects preserved; a0/a1/a3 are reused as
  // scratch below and restored from the stack before any tail call.
  __ push(argument_count);
  __ push(new_target);
  __ push(closure);

  Register map = a0;
  Register index = a2;
  __ lw(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ lw(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ lw(index, FieldMemOperand(map, FixedArray::kLengthOffset));
  __ Branch(&gotta_call_runtime, lt, index, Operand(Smi::FromInt(2)));

  // Find literals.
  // a3 : native context
  // a2 : length / index
  // a0 : optimized code map
  // stack[0] : new target
  // stack[4] : closure
  Register native_context = a3;
  __ lw(native_context, NativeContextMemOperand());

  __ bind(&loop_top);
  Register temp = a1;
  Register array_pointer = t1;

  // Does the native context match?
  __ sll(at, index, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(array_pointer, map, Operand(at));
  __ lw(temp, FieldMemOperand(array_pointer,
                              SharedFunctionInfo::kOffsetToPreviousContext));
  __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ Branch(&loop_bottom, ne, temp, Operand(native_context));
  // OSR id set to none?
  __ lw(temp, FieldMemOperand(array_pointer,
                              SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ Branch(&loop_bottom, ne, temp, Operand(Smi::FromInt(bailout_id)));
  // Literals available?
  __ lw(temp, FieldMemOperand(array_pointer,
                              SharedFunctionInfo::kOffsetToPreviousLiterals));
  __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(temp, &gotta_call_runtime);

  // Save the literals in the closure.
  __ lw(t0, MemOperand(sp, 0));
  __ sw(temp, FieldMemOperand(t0, JSFunction::kLiteralsOffset));
  __ push(index);
  __ RecordWriteField(t0, JSFunction::kLiteralsOffset, temp, index,
                      kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ pop(index);

  // Code available?
  Register entry = t0;
  __ lw(entry,
        FieldMemOperand(array_pointer,
                        SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &maybe_call_runtime);

  // Found literals and code. Get them into the closure and return.
  __ pop(closure);
  // Store code entry in the closure.
  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));

  Label install_optimized_code_and_tailcall;
  __ bind(&install_optimized_code_and_tailcall);
  __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, t1);

  // Link the closure into the optimized function list.
  // t0 : code entry
  // a3 : native context
  // a1 : closure
  __ lw(t1,
        ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ sw(t1, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, t1, a0,
                      kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ sw(closure,
        ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  // Save closure before the write barrier.
  __ mov(t1, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0,
                            kRAHasNotBeenSaved, kDontSaveFPRegs);
  __ mov(closure, t1);
  __ pop(new_target);
  __ pop(argument_count);
  __ Jump(entry);

  __ bind(&loop_bottom);
  __ Subu(index, index,
          Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
  __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1)));

  // We found neither literals nor code.
  __ jmp(&gotta_call_runtime);

  __ bind(&maybe_call_runtime);
  __ pop(closure);

  // Last possibility. Check the context free optimized code map entry.
  __ lw(entry, FieldMemOperand(map, FixedArray::kHeaderSize +
                                        SharedFunctionInfo::kSharedCodeIndex));
  __ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Store code entry in the closure.
  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(&install_optimized_code_and_tailcall);

  __ bind(&try_shared);
  __ pop(new_target);
  __ pop(argument_count);
  // Is the full code valid?
  __ lw(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ lw(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ lw(t1, FieldMemOperand(entry, Code::kFlagsOffset));
  __ And(t1, t1, Operand(Code::KindField::kMask));
  __ srl(t1, t1, Code::KindField::kShift);
  __ Branch(&gotta_call_runtime_no_stack, eq, t1, Operand(Code::BUILTIN));
  // Yes, install the full code.
  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, t1);
  __ Jump(entry);

  __ bind(&gotta_call_runtime);
  __ pop(closure);
  __ pop(new_target);
  __ pop(argument_count);
  __ bind(&gotta_call_runtime_no_stack);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
1344
// Tail-calls into the runtime to compile baseline code for the function.
void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}
Steve Block44f0eee2011-05-26 01:26:41 +01001348
// Tail-calls into the runtime to compile optimized code on the main thread.
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}
1353
1354
// Tail-calls into the runtime to kick off concurrent optimized compilation.
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
1358
1359
// Shared body of the code-aging builtins: calls the C++ make_code_young
// helper with (sequence address, isolate), then jumps back to the start of
// the (now rejuvenated) code-age sequence.
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  // a0 - contains return address (beginning of patch sequence)
  // a1 - isolate
  // a3 - new target
  RegList saved_regs =
      (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  __ Jump(a0);
}
1387
// Stamp out the even/odd-marking "make code young again" builtins for every
// code age in CODE_AGE_LIST; all of them share GenerateMakeCodeYoungAgainCommon.
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
    MacroAssembler* masm) { \
  GenerateMakeCodeYoungAgainCommon(masm); \
} \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
    MacroAssembler* masm) { \
  GenerateMakeCodeYoungAgainCommon(masm); \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1399
1400
// Like GenerateMakeCodeYoungAgainCommon, but calls the mark-as-executed C++
// helper and then replays the standard frame prologue before resuming past
// the code-age sequence.
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  // a0 - contains return address (beginning of patch sequence)
  // a1 - isolate
  // a3 - new target
  RegList saved_regs =
      (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);

  // Perform prologue operations usually performed by the young code stub.
  __ PushStandardFrame(a1);

  // Jump to point after the code-age stub.
  __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
  __ Jump(a0);
}
1434
1435
// Marking code as executed twice uses the same path as making it young
// again: the code is treated as live and its age sequence is reset.
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
1439
1440
// "To be executed once" shares the implementation of "executed once":
// both patch the sequence and resume via the standard prologue.
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}
1444
1445
// Common body for the NotifyStubFailure builtins: informs the runtime that a
// stub deopted, preserving all JS caller-saved and callee-saved registers
// around the call, then pops the bailout state word and returns to the miss
// handler.  |save_doubles| selects whether the runtime call also saves FP
// registers.
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Notify the runtime of the stub failure; no explicit arguments are
    // passed, only whether doubles must be preserved.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ Addu(sp, sp, Operand(kPointerSize)); // Ignore state
  __ Jump(ra); // Jump to miss handler
}
1463
1464
// Stub-failure notification without saving FP registers.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
1468
1469
// Stub-failure notification that additionally preserves FP registers.
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
1473
1474
// Common body for the NotifyDeoptimized builtins: tells the runtime which
// kind of bailout occurred, then unwinds according to the full-codegen state
// word left on the stack (NO_REGISTERS: pop one word; TOS_REGISTER: restore
// the accumulator from the stack and pop two words).
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system as a Smi.
    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a0);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> t2.
  __ lw(t2, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(t2);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register, ne, t2,
            Operand(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS)));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot Addu will emit one instruction.
  __ Addu(sp, sp, Operand(1 * kPointerSize)); // Remove state.

  __ bind(&with_tos_register);
  // The interpreter expects the accumulator (v0) to be reloaded here.
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code());
  __ lw(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, t2,
            Operand(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot Addu will emit one instruction.
  __ Addu(sp, sp, Operand(2 * kPointerSize)); // Remove state.

  // Any other state word is a bug.
  __ bind(&unknown_state);
  __ stop("no cases left");
}
1509
1510
// Eager deoptimization notification.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
1514
1515
// Soft deoptimization notification.
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
1519
1520
// Lazy deoptimization notification.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
1524
1525
// Checks that |receiver| is compatible with the signature of
// |function_template_info| by walking the receiver's (hidden) prototype
// chain and, for each constructor, the chain of parent function templates,
// looking for a template whose signature matches.  Falls through on success;
// jumps to |receiver_check_failed| otherwise.  On failure paths |receiver|
// has been advanced along the prototype chain.
// Clobbers {t2, t3, t4, t5}.
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Label* receiver_check_failed) {
  Register signature = t2;
  Register map = t3;
  Register constructor = t4;
  Register scratch = t5;

  // If there is no signature, return the holder.
  __ lw(signature, FieldMemOperand(function_template_info,
                                   FunctionTemplateInfo::kSignatureOffset));
  Label receiver_check_passed;
  __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
                &receiver_check_passed);

  // Walk the prototype chain.
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, scratch, scratch);
  Label next_prototype;
  __ Branch(&next_prototype, ne, scratch, Operand(JS_FUNCTION_TYPE));
  // Reuse the constructor register to hold its function template data.
  Register type = constructor;
  __ lw(type,
        FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ lw(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ Branch(&receiver_check_passed, eq, signature, Operand(type),
            USE_DELAY_SLOT);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ GetObjectType(type, scratch, scratch);
  __ Branch(&next_prototype, ne, scratch, Operand(FUNCTION_TEMPLATE_INFO_TYPE));

  // Otherwise load the parent function template and iterate.
  __ lw(type,
        FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ Branch(&function_template_loop);

  // Load the next prototype and iterate.  Only hidden prototypes are
  // followed; a map without one fails the check.
  __ bind(&next_prototype);
  __ lw(scratch, FieldMemOperand(map, Map::kBitField3Offset));
  __ DecodeField<Map::HasHiddenPrototype>(scratch);
  __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg));
  __ lw(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));

  __ Branch(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}
1587
1588
// Entry point for fast API calls: verifies that the receiver is compatible
// with the callee's FunctionTemplateInfo signature and, if so, tail-jumps to
// the template's fast handler code.  On signature mismatch, drops all
// arguments and throws an Illegal Invocation TypeError.
void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : callee
  //  -- ra                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ lw(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(t1, FieldMemOperand(t1, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  // t8 = address of the receiver slot (sp + argc * kPointerSize).
  __ Lsa(t8, sp, a0, kPointerSizeLog2);
  __ lw(t0, MemOperand(t8));
  CompatibleReceiverCheck(masm, t0, t1, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ lw(t2, FieldMemOperand(t1, FunctionTemplateInfo::kCallCodeOffset));
  __ lw(t2, FieldMemOperand(t2, CallHandlerInfo::kFastHandlerOffset));
  __ Addu(t2, t2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(t2);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver);
  __ Addu(t8, t8, Operand(kPointerSize));
  __ addu(sp, t8, zero_reg);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}
1624
1625
// On-stack replacement: asks the runtime to compile an OSR-ready version of
// the current function; if one is produced, computes the OSR entry address
// from the code object's deoptimization data and "returns" into it,
// otherwise returns to the unoptimized code.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ addu(v0, v0, a1);
  __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}
1657
1658
// static
// Date.prototype getter builtin: loads |field_index| of the JSDate receiver.
// The date value itself is always loaded directly; other cached fields are
// loaded inline only when the date-cache stamp still matches, falling back
// to the C++ date-field function otherwise.  Throws if the receiver is not
// a JSDate.
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- sp[0] : receiver
  // -----------------------------------

  // 1. Pop receiver into a0 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(a0);
    __ JumpIfSmi(a0, &receiver_not_date);
    __ GetObjectType(a0, t0, t0);
    __ Branch(&receiver_not_date, ne, t0, Operand(JS_DATE_TYPE));
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    __ Ret(USE_DELAY_SLOT);
    __ lw(v0, FieldMemOperand(a0, JSDate::kValueOffset));  // In delay slot.
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      Label stamp_mismatch;
      // Compare the object's cache stamp with the global one; on match the
      // cached field is valid and can be returned directly.
      __ li(a1, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
      __ lw(a1, MemOperand(a1));
      __ lw(t0, FieldMemOperand(a0, JSDate::kCacheStampOffset));
      __ Branch(&stamp_mismatch, ne, t0, Operand(a1));
      __ Ret(USE_DELAY_SLOT);
      __ lw(v0, FieldMemOperand(
                    a0, JSDate::kValueOffset +
                            field_index * kPointerSize));  // In delay slot.
      __ bind(&stamp_mismatch);
    }
    // Slow path: compute the field in C++ (a0 = date object, a1 = index).
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, t0);
    __ li(a1, Operand(Smi::FromInt(field_index)));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  __ TailCallRuntime(Runtime::kThrowNotDateError);
}
1704
// static
// Function.prototype.apply: normalizes the stack into (receiver, thisArg)
// and dispatches to the Apply builtin with the argArray, or to Call with no
// arguments when argArray is null/undefined.  Throws if the receiver is not
// callable.
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into a1, argArray into a0 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg;
    Register scratch = t0;
    // Default thisArg (a2) and argArray (a3) to undefined; each is
    // overwritten below only if actually present on the stack.
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ mov(a3, a2);
    // Lsa() cannot be used here as the scratch value is used later.
    __ sll(scratch, a0, kPointerSizeLog2);
    __ Addu(a0, sp, Operand(scratch));
    __ lw(a1, MemOperand(a0));  // receiver
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a2, MemOperand(a0));  // thisArg
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a3, MemOperand(a0));  // argArray
    __ bind(&no_arg);
    // Drop all arguments and store thisArg as the new receiver slot.
    __ Addu(sp, sp, Operand(scratch));
    __ sw(a2, MemOperand(sp));
    __ mov(a0, a3);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argArray
  //  -- a1    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(a1, &receiver_not_callable);
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsCallable));
  __ Branch(&receiver_not_callable, eq, t0, Operand(zero_reg));

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ mov(a0, zero_reg);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ sw(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1777
1778
// static
// Function.prototype.call: ensures there is at least one argument, takes the
// function from the receiver slot, shifts the remaining arguments down one
// slot (so the first argument becomes the new receiver), and tail calls the
// Call builtin.
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  {
    Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    // No arguments: push undefined as the (dummy) first argument.
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ Addu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack.
  // a0: actual number of arguments
  __ Lsa(at, sp, a0, kPointerSizeLog2);
  __ lw(a1, MemOperand(at));

  // 3. Shift arguments and return address one slot down on the stack
  // (overwriting the original receiver).  Adjust argument count to make
  // the original first argument the new receiver.
  // a0: actual number of arguments
  // a1: function
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ Lsa(a2, sp, a0, kPointerSizeLog2);

    __ bind(&loop);
    __ lw(at, MemOperand(a2, -kPointerSize));
    __ sw(at, MemOperand(a2));
    __ Subu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Subu(a0, a0, Operand(1));
    __ Pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
1820
1821
// Reflect.apply(target, thisArgument, argumentsList): normalizes the stack
// into (thisArgument) and dispatches to the Apply builtin with the
// argumentsList.  Throws if target is not callable.
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label no_arg;
    Register scratch = t0;
    // Default target (a1), thisArgument (a2), and argumentsList (a3) to
    // undefined; each is overwritten below only if actually present.
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ mov(a2, a1);
    __ mov(a3, a1);
    __ sll(scratch, a0, kPointerSizeLog2);
    __ mov(a0, scratch);
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(zero_reg));
    __ Addu(a0, sp, Operand(a0));
    __ lw(a1, MemOperand(a0));  // target
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a2, MemOperand(a0));  // thisArgument
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a3, MemOperand(a0));  // argumentsList
    __ bind(&no_arg);
    // Drop all arguments and store thisArgument as the new receiver slot.
    __ Addu(sp, sp, Operand(scratch));
    __ sw(a2, MemOperand(sp));
    __ mov(a0, a3);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a1    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(a1, &target_not_callable);
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsCallable));
  __ Branch(&target_not_callable, eq, t0, Operand(zero_reg));

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ sw(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1884
1885
// Reflect.construct(target, argumentsList[, newTarget]): normalizes the
// stack and dispatches to the Apply builtin with target in a1 and
// new.target in a3 (defaulting to target).  Throws if either target or
// new.target is not a constructor.
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
  // new.target into a3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label no_arg;
    Register scratch = t0;
    // Default target (a1) and argumentsList (a2) to undefined; each is
    // overwritten below only if actually present on the stack.
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ mov(a2, a1);
    // Lsa() cannot be used here as the scratch value is used later.
    __ sll(scratch, a0, kPointerSizeLog2);
    __ Addu(a0, sp, Operand(scratch));
    __ sw(a2, MemOperand(a0));  // receiver
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a1, MemOperand(a0));  // target
    __ mov(a3, a1);             // new.target defaults to target
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a2, MemOperand(a0));  // argumentsList
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a3, MemOperand(a0));  // new.target
    __ bind(&no_arg);
    __ Addu(sp, sp, Operand(scratch));
    __ mov(a0, a2);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a3    : new.target
  //  -- a1    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(a1, &target_not_constructor);
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsConstructor));
  __ Branch(&target_not_constructor, eq, t0, Operand(zero_reg));

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(a3, &new_target_not_constructor);
  __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsConstructor));
  __ Branch(&new_target_not_constructor, eq, t0, Operand(zero_reg));

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ sw(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ sw(a3, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
1963
1964
// Jumps to |stack_overflow| if pushing |a2| (expected argument count)
// pointers would cross the real stack limit.  Clobbers t1 and at.
static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- a0 : actual number of arguments
  //  -- a1 : function (passed through to callee)
  //  -- a2 : expected number of arguments
  //  -- a3 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
  // Make t1 the space we have left. The stack might already be overflowed
  // here which will cause t1 to become negative.
  __ subu(t1, sp, t1);
  // Check if the arguments will overflow the stack.
  __ sll(at, a2, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, t1, Operand(at));
}
1985
1986
// Builds an arguments-adaptor frame: pushes the smi-tagged argument count
// (a0), the function (a1), the ARGUMENTS_ADAPTOR frame-type marker, fp and
// ra, then points fp at the fixed part of the new frame.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Smi-tag the argument count in place; it is restored on frame exit.
  __ sll(a0, a0, kSmiTagSize);
  __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
  __ Addu(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}
1994
1995
// Tears down an arguments-adaptor frame and drops the adapted arguments
// (plus the receiver), preserving the result in v0.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                             kPointerSize)));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  // a1 is a smi, so the shift amount compensates for the tag.
  __ Lsa(sp, sp, a1, kPointerSizeLog2 - kSmiTagSize);
  // Adjust for the receiver.
  __ Addu(sp, sp, Operand(kPointerSize));
}
2010
2011
// static
// Apply builtin: materializes the argumentsList into a FixedArray (inline
// for unmodified arguments objects and fast JSArrays, via the runtime
// otherwise), checks for stack overflow, pushes the elements, and then
// dispatches to Call or Construct depending on whether new.target is
// undefined.
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a1    : target
  //  -- a3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(a0, &create_runtime);

    // Load the map of argumentsList into a2.
    __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));

    // Load native context into t0.
    __ lw(t0, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ lw(at, ContextMemOperand(t0, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ Branch(&create_arguments, eq, a2, Operand(at));
    __ lw(at, ContextMemOperand(t0, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ Branch(&create_arguments, eq, a2, Operand(at));

    // Check if argumentsList is a fast JSArray.
    __ lw(v0, FieldMemOperand(a2, HeapObject::kMapOffset));
    __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE));

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      // Save target/new.target around the call; a0 receives the FixedArray.
      __ Push(a1, a3, a0);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ mov(a0, v0);
      __ Pop(a1, a3);
      __ lw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
      __ SmiUntag(a2);
    }
    __ Branch(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    // Only safe when the elements length matches the arguments length
    // (i.e. the arguments object is unmodified); otherwise fall back.
    __ lw(a2, FieldMemOperand(a0, JSArgumentsObject::kLengthOffset));
    __ lw(t0, FieldMemOperand(a0, JSObject::kElementsOffset));
    __ lw(at, FieldMemOperand(t0, FixedArray::kLengthOffset));
    __ Branch(&create_runtime, ne, a2, Operand(at));
    __ SmiUntag(a2);
    __ mov(a0, t0);
    __ Branch(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ lw(a2, FieldMemOperand(a2, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(a2);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    // Only packed SMI and packed/holey object elements are handled inline.
    __ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS));
    __ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ lw(a2, FieldMemOperand(a0, JSArray::kLengthOffset));
    __ lw(a0, FieldMemOperand(a0, JSArray::kElementsOffset));
    __ SmiUntag(a2);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(t0, Heap::kRealStackLimitRootIndex);
    // Make ip the space we have left. The stack might already be overflowed
    // here which will cause ip to become negative.
    __ Subu(t0, sp, t0);
    // Check if the arguments will overflow the stack.
    __ sll(at, a2, kPointerSizeLog2);
    __ Branch(&done, gt, t0, Operand(at));  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- a1    : target
  //  -- a0    : args (a FixedArray built from argumentsList)
  //  -- a2    : len (number of elements to push from args)
  //  -- a3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    __ mov(t0, zero_reg);
    Label done, loop;
    __ bind(&loop);
    __ Branch(&done, eq, t0, Operand(a2));
    __ Lsa(at, a0, t0, kPointerSizeLog2);
    __ lw(at, FieldMemOperand(at, FixedArray::kHeaderSize));
    __ Push(at);
    __ Addu(t0, t0, Operand(1));
    __ Branch(&loop);
    __ bind(&done);
    // a0 now holds the number of pushed arguments.
    __ Move(a0, t0);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    Label construct;
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(&construct, ne, a3, Operand(at));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
    __ bind(&construct);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
2131
Ben Murdoch097c5b22016-05-18 11:27:45 +01002132namespace {
2133
2134// Drops top JavaScript frame and an arguments adaptor frame below it (if
2135// present) preserving all the arguments prepared for current call.
2136// Does nothing if debugger is currently active.
2137// ES6 14.6.3. PrepareForTailCall
2138//
2139// Stack structure for the function g() tail calling f():
2140//
2141// ------- Caller frame: -------
2142// | ...
2143// | g()'s arg M
2144// | ...
2145// | g()'s arg 1
2146// | g()'s receiver arg
2147// | g()'s caller pc
2148// ------- g()'s frame: -------
2149// | g()'s caller fp <- fp
2150// | g()'s context
2151// | function pointer: g
2152// | -------------------------
2153// | ...
2154// | ...
2155// | f()'s arg N
2156// | ...
2157// | f()'s arg 1
2158// | f()'s receiver arg <- sp (f()'s caller pc is not on the stack yet!)
2159// ----------------------
2160//
2161void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
2162 Register scratch1, Register scratch2,
2163 Register scratch3) {
2164 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
2165 Comment cmnt(masm, "[ PrepareForTailCall");
2166
Ben Murdochda12d292016-06-02 14:46:10 +01002167 // Prepare for tail call only if ES2015 tail call elimination is enabled.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002168 Label done;
Ben Murdochda12d292016-06-02 14:46:10 +01002169 ExternalReference is_tail_call_elimination_enabled =
2170 ExternalReference::is_tail_call_elimination_enabled_address(
2171 masm->isolate());
2172 __ li(at, Operand(is_tail_call_elimination_enabled));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002173 __ lb(scratch1, MemOperand(at));
Ben Murdochda12d292016-06-02 14:46:10 +01002174 __ Branch(&done, eq, scratch1, Operand(zero_reg));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002175
2176 // Drop possible interpreter handler/stub frame.
2177 {
2178 Label no_interpreter_frame;
Ben Murdochda12d292016-06-02 14:46:10 +01002179 __ lw(scratch3,
2180 MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002181 __ Branch(&no_interpreter_frame, ne, scratch3,
2182 Operand(Smi::FromInt(StackFrame::STUB)));
2183 __ lw(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2184 __ bind(&no_interpreter_frame);
2185 }
2186
2187 // Check if next frame is an arguments adaptor frame.
Ben Murdochda12d292016-06-02 14:46:10 +01002188 Register caller_args_count_reg = scratch1;
Ben Murdoch097c5b22016-05-18 11:27:45 +01002189 Label no_arguments_adaptor, formal_parameter_count_loaded;
2190 __ lw(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01002191 __ lw(scratch3,
2192 MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002193 __ Branch(&no_arguments_adaptor, ne, scratch3,
2194 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2195
Ben Murdochda12d292016-06-02 14:46:10 +01002196 // Drop current frame and load arguments count from arguments adaptor frame.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002197 __ mov(fp, scratch2);
Ben Murdochda12d292016-06-02 14:46:10 +01002198 __ lw(caller_args_count_reg,
Ben Murdoch097c5b22016-05-18 11:27:45 +01002199 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01002200 __ SmiUntag(caller_args_count_reg);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002201 __ Branch(&formal_parameter_count_loaded);
2202
2203 __ bind(&no_arguments_adaptor);
2204 // Load caller's formal parameter count
Ben Murdochda12d292016-06-02 14:46:10 +01002205 __ lw(scratch1,
2206 MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002207 __ lw(scratch1,
2208 FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01002209 __ lw(caller_args_count_reg,
Ben Murdoch097c5b22016-05-18 11:27:45 +01002210 FieldMemOperand(scratch1,
2211 SharedFunctionInfo::kFormalParameterCountOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01002212 __ SmiUntag(caller_args_count_reg);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002213
2214 __ bind(&formal_parameter_count_loaded);
2215
Ben Murdochda12d292016-06-02 14:46:10 +01002216 ParameterCount callee_args_count(args_reg);
2217 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
2218 scratch3);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002219 __ bind(&done);
2220}
2221} // namespace
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002222
// static
// Emits the CallFunction builtin: invokes a known JSFunction, converting the
// receiver first when required by the function's language mode (ES6 9.2.1
// [[Call]]). `mode` encodes what the call site knows about the receiver;
// `tail_call_mode` selects ES2015 tail-call frame dropping.
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(a1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor" -- those throw when
  // called without `new` (handled at the end of this builtin).
  Label class_constructor;
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset));
  __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ Branch(&class_constructor, ne, at, Operand(zero_reg));

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  // The STATIC_ASSERT lets one byte load (below) cover both the native and
  // the strict-mode bits.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
  __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                         (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ Branch(&done_convert, ne, at, Operand(zero_reg));
  {
    // ----------- S t a t e -------------
    //  -- a0 : the number of arguments (not including the receiver)
    //  -- a1 : the function to call (checked to be a JSFunction)
    //  -- a2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy (receiver known to be null/undefined,
      // so no run-time checks are needed).
      __ LoadGlobalProxy(a3);
    } else {
      Label convert_to_object, convert_receiver;
      // Load the receiver from the stack; it sits above the a0 arguments.
      __ Lsa(at, sp, a0, kPointerSizeLog2);
      __ lw(a3, MemOperand(at));
      __ JumpIfSmi(a3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ GetObjectType(a3, t0, t0);
      // Already a JSReceiver -- no conversion needed.
      __ Branch(&done_convert, hs, t0, Operand(FIRST_JS_RECEIVER_TYPE));
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        // null/undefined receivers become the global proxy instead of going
        // through ToObject.
        __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(a3);
        }
        __ Branch(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        // a0/a1 are caller-saved across the stub call, so Smi-tag a0 and
        // push both; v0 holds the ToObject result.
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
        __ Push(a0, a1);
        __ mov(a0, a3);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ mov(a3, v0);
        __ Pop(a0, a1);
        __ sra(a0, a0, kSmiTagSize);  // Un-tag.
      }
      // Reload the shared function info; a2 may have been clobbered.
      __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    // Store the (possibly converted) receiver back into its stack slot.
    __ Lsa(at, sp, a0, kPointerSizeLog2);
    __ sw(a3, MemOperand(at));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  //  -- a2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  // Load the expected parameter count and dispatch through the common
  // invoke path, which handles argument adaptation if actual != expected.
  __ lw(a2,
        FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
  __ sra(a2, a2, kSmiTagSize);  // Un-tag.
  ParameterCount actual(a0);
  ParameterCount expected(a2);
  __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
2334
2335
// static
// Emits the CallBoundFunction builtin: calls a JSBoundFunction by patching
// the receiver to [[BoundThis]], splicing [[BoundArguments]] in front of the
// pushed arguments, and tail-jumping to the Call builtin on the
// [[BoundTargetFunction]].
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  // Patch the receiver to [[BoundThis]]. The receiver lives above the a0
  // pushed arguments, at sp + a0 * kPointerSize.
  {
    __ lw(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
    __ Lsa(t0, sp, a0, kPointerSizeLog2);
    __ sw(at, MemOperand(t0));
  }

  // Load [[BoundArguments]] into a2 and length of that into t0.
  __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(t0);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- t0 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ sll(t1, t0, kPointerSizeLog2);
    __ Subu(sp, sp, Operand(t1));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Addu(sp, sp, Operand(t1));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  // Slides the existing arguments (and receiver) down into the freshly
  // reserved space: t0 indexes the old slot, t1 the new slot.
  {
    Label loop, done_loop;
    __ mov(t1, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, gt, t1, Operand(a0));
    __ Lsa(t2, sp, t0, kPointerSizeLog2);
    __ lw(at, MemOperand(t2));
    __ Lsa(t2, sp, t1, kPointerSizeLog2);
    __ sw(at, MemOperand(t2));
    __ Addu(t0, t0, Operand(1));
    __ Addu(t1, t1, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  // Walks the FixedArray backwards while bumping a0, so a0 ends up as the
  // new total argument count (original args + bound args).
  {
    Label loop, done_loop;
    __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(t0);
    __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Subu(t0, t0, Operand(1));
    __ Branch(&done_loop, lt, t0, Operand(zero_reg));
    __ Lsa(t1, a2, t0, kPointerSizeLog2);
    __ lw(at, MemOperand(t1));
    __ Lsa(t1, sp, a0, kPointerSizeLog2);
    __ sw(at, MemOperand(t1));
    __ Addu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Call the [[BoundTargetFunction]] via the Call builtin.
  // The builtin's code object is loaded through an external reference so the
  // generated code stays valid if the builtin code object moves.
  __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                      masm->isolate())));
  __ lw(at, MemOperand(at));
  __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
2429
2430
// static
// Emits the generic Call builtin: type-dispatches an arbitrary callee to
// CallFunction (JSFunction), CallBoundFunction (JSBoundFunction), the
// runtime (JSProxy), or the call-as-function delegate; throws for
// non-callable targets.
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(a1, &non_callable);
  __ bind(&non_smi);
  // t1 <- map of a1, t2 <- instance type.
  __ GetObjectType(a1, t1, t2);
  // Fast paths: plain functions and bound functions get dedicated builtins.
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Check if target has a [[Call]] internal method.
  __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t1, t1, Operand(1 << Map::kIsCallable));
  __ Branch(&non_callable, eq, t1, Operand(zero_reg));

  // Callable but not a proxy: treat as exotic callable object below.
  __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE));

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(a1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ Addu(a0, a0, 2);
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ Lsa(at, sp, a0, kPointerSizeLog2);
  __ sw(a1, MemOperand(at));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
2489
2490
// static
// Emits the ConstructFunction builtin: tail-calls the function-specific
// construct stub stored on the callee's SharedFunctionInfo.
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(a1);

  // Calling convention for function specific ConstructStubs require
  // a2 to contain either an AllocationSite or undefined.
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point). The +Code::kHeaderSize skips the Code object
  // header to reach the first instruction.
  __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
  __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
2511
2512
// static
// Emits the ConstructBoundFunction builtin: splices [[BoundArguments]] in
// front of the pushed arguments, patches new.target when it equals the bound
// function itself, then tail-jumps to the Construct builtin on the
// [[BoundTargetFunction]].
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  // Load [[BoundArguments]] into a2 and length of that into t0.
  __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(t0);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- a3 : the new target (checked to be a constructor)
  //  -- t0 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ sll(t1, t0, kPointerSizeLog2);
    __ Subu(sp, sp, Operand(t1));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Addu(sp, sp, Operand(t1));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  // Slides the a0 pushed arguments into the freshly reserved space:
  // t0 indexes the old slot, t1 the new slot.
  {
    Label loop, done_loop;
    __ mov(t1, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, ge, t1, Operand(a0));
    __ Lsa(t2, sp, t0, kPointerSizeLog2);
    __ lw(at, MemOperand(t2));
    __ Lsa(t2, sp, t1, kPointerSizeLog2);
    __ sw(at, MemOperand(t2));
    __ Addu(t0, t0, Operand(1));
    __ Addu(t1, t1, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  // Walks the FixedArray backwards while bumping a0, so a0 ends up as the
  // new total argument count (original args + bound args).
  {
    Label loop, done_loop;
    __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(t0);
    __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Subu(t0, t0, Operand(1));
    __ Branch(&done_loop, lt, t0, Operand(zero_reg));
    __ Lsa(t1, a2, t0, kPointerSizeLog2);
    __ lw(at, MemOperand(t1));
    __ Lsa(t1, sp, a0, kPointerSizeLog2);
    __ sw(at, MemOperand(t1));
    __ Addu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label skip_load;
    __ Branch(&skip_load, ne, a1, Operand(a3));
    __ lw(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&skip_load);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  // The builtin's code object is loaded through an external reference so the
  // generated code stays valid if the builtin code object moves.
  __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ lw(at, MemOperand(at));
  __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
2603
2604
// static
// Emits the ConstructProxy builtin: defers Proxy [[Construct]] entirely to
// the runtime.
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSProxy)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(a1, a3);
  // Include the pushed new_target, constructor and the receiver.
  __ Addu(a0, a0, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
2622
2623
// static
// Emits the generic Construct builtin: type-dispatches an arbitrary target
// to ConstructFunction, ConstructBoundFunction, ConstructProxy, or the
// construct-as-constructor delegate; throws for non-constructable targets.
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (can be any Object)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(a1, &non_constructor);

  // Dispatch based on instance type.
  // t1 <- map of a1, t2 <- instance type.
  __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));

  // Check if target has a [[Construct]] internal method.
  __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t3, t3, Operand(1 << Map::kIsConstructor));
  __ Branch(&non_constructor, eq, t3, Operand(zero_reg));

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Only dispatch to proxies after checking whether they are constructors.
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq, t2, Operand(JS_PROXY_TYPE));

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ Lsa(at, sp, a0, kPointerSizeLog2);
    __ sw(a1, MemOperand(at));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
2674
Ben Murdochc5610432016-08-08 18:44:38 +01002675// static
2676void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2677 // ----------- S t a t e -------------
2678 // -- a0 : requested object size (untagged)
2679 // -- ra : return address
2680 // -----------------------------------
2681 __ SmiTag(a0);
2682 __ Push(a0);
2683 __ Move(cp, Smi::FromInt(0));
2684 __ TailCallRuntime(Runtime::kAllocateInNewSpace);
2685}
2686
// static
// Emits the AllocateInOldSpace builtin: like AllocateInNewSpace, but passes
// an extra Smi-encoded flag selecting OLD_SPACE as the target space.
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : requested object size (untagged)
  //  -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(a0, a1);
  // No context available here; pass a dummy (Smi zero) in cp for the
  // runtime call.
  __ Move(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002699
Andrei Popescu31002712010-02-23 13:46:05 +00002700void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002701 // State setup as expected by MacroAssembler::InvokePrologue.
2702 // ----------- S t a t e -------------
2703 // -- a0: actual arguments count
2704 // -- a1: function (passed through to callee)
2705 // -- a2: expected arguments count
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002706 // -- a3: new target (passed through to callee)
Ben Murdoch257744e2011-11-30 15:57:28 +00002707 // -----------------------------------
2708
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002709 Label invoke, dont_adapt_arguments, stack_overflow;
Ben Murdoch257744e2011-11-30 15:57:28 +00002710
2711 Label enough, too_few;
2712 __ Branch(&dont_adapt_arguments, eq,
2713 a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
2714 // We use Uless as the number of argument should always be greater than 0.
2715 __ Branch(&too_few, Uless, a0, Operand(a2));
2716
2717 { // Enough parameters: actual >= expected.
2718 // a0: actual number of arguments as a smi
2719 // a1: function
2720 // a2: expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002721 // a3: new target (passed through to callee)
Ben Murdoch257744e2011-11-30 15:57:28 +00002722 __ bind(&enough);
2723 EnterArgumentsAdaptorFrame(masm);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002724 ArgumentAdaptorStackCheck(masm, &stack_overflow);
Ben Murdoch257744e2011-11-30 15:57:28 +00002725
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002726 // Calculate copy start address into a0 and copy end address into t1.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002727 __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch257744e2011-11-30 15:57:28 +00002728 // Adjust for return address and receiver.
2729 __ Addu(a0, a0, Operand(2 * kPointerSize));
2730 // Compute copy end address.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002731 __ sll(t1, a2, kPointerSizeLog2);
2732 __ subu(t1, a0, t1);
Ben Murdoch257744e2011-11-30 15:57:28 +00002733
2734 // Copy the arguments (including the receiver) to the new stack frame.
2735 // a0: copy start address
2736 // a1: function
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002737 // a2: expected number of arguments
2738 // a3: new target (passed through to callee)
2739 // t1: copy end address
Ben Murdoch257744e2011-11-30 15:57:28 +00002740
2741 Label copy;
2742 __ bind(&copy);
2743 __ lw(t0, MemOperand(a0));
2744 __ push(t0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002745 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t1));
Ben Murdoch257744e2011-11-30 15:57:28 +00002746 __ addiu(a0, a0, -kPointerSize); // In delay slot.
2747
2748 __ jmp(&invoke);
2749 }
2750
2751 { // Too few parameters: Actual < expected.
2752 __ bind(&too_few);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002753 EnterArgumentsAdaptorFrame(masm);
2754 ArgumentAdaptorStackCheck(masm, &stack_overflow);
2755
2756 // Calculate copy start address into a0 and copy end address into t3.
Ben Murdoch257744e2011-11-30 15:57:28 +00002757 // a0: actual number of arguments as a smi
2758 // a1: function
2759 // a2: expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002760 // a3: new target (passed through to callee)
Ben Murdoch097c5b22016-05-18 11:27:45 +01002761 __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch257744e2011-11-30 15:57:28 +00002762 // Adjust for return address and receiver.
2763 __ Addu(a0, a0, Operand(2 * kPointerSize));
2764 // Compute copy end address. Also adjust for return address.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002765 __ Addu(t3, fp, kPointerSize);
Ben Murdoch257744e2011-11-30 15:57:28 +00002766
2767 // Copy the arguments (including the receiver) to the new stack frame.
2768 // a0: copy start address
2769 // a1: function
2770 // a2: expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002771 // a3: new target (passed through to callee)
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002772 // t3: copy end address
Ben Murdoch257744e2011-11-30 15:57:28 +00002773 Label copy;
2774 __ bind(&copy);
2775 __ lw(t0, MemOperand(a0)); // Adjusted above for return addr and receiver.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002776 __ Subu(sp, sp, kPointerSize);
Ben Murdoch257744e2011-11-30 15:57:28 +00002777 __ Subu(a0, a0, kPointerSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002778 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
2779 __ sw(t0, MemOperand(sp)); // In the delay slot.
Ben Murdoch257744e2011-11-30 15:57:28 +00002780
2781 // Fill the remaining expected arguments with undefined.
2782 // a1: function
2783 // a2: expected number of arguments
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002784 // a3: new target (passed through to callee)
Ben Murdoch257744e2011-11-30 15:57:28 +00002785 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
2786 __ sll(t2, a2, kPointerSizeLog2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002787 __ Subu(t1, fp, Operand(t2));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002788 // Adjust for frame.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002789 __ Subu(t1, t1, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002790 2 * kPointerSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00002791
2792 Label fill;
2793 __ bind(&fill);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002794 __ Subu(sp, sp, kPointerSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002795 __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(t1));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002796 __ sw(t0, MemOperand(sp));
Ben Murdoch257744e2011-11-30 15:57:28 +00002797 }
2798
2799 // Call the entry point.
2800 __ bind(&invoke);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002801 __ mov(a0, a2);
2802 // a0 : expected number of arguments
2803 // a1 : function (passed through to callee)
2804 // a3 : new target (passed through to callee)
2805 __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
2806 __ Call(t0);
Ben Murdoch257744e2011-11-30 15:57:28 +00002807
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002808 // Store offset of return address for deoptimizer.
2809 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2810
Ben Murdoch257744e2011-11-30 15:57:28 +00002811 // Exit frame and return.
2812 LeaveArgumentsAdaptorFrame(masm);
2813 __ Ret();
2814
2815
2816 // -------------------------------------------
2817 // Don't adapt arguments.
2818 // -------------------------------------------
2819 __ bind(&dont_adapt_arguments);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002820 __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
2821 __ Jump(t0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002822
2823 __ bind(&stack_overflow);
2824 {
2825 FrameScope frame(masm, StackFrame::MANUAL);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002826 __ CallRuntime(Runtime::kThrowStackOverflow);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002827 __ break_(0xCC);
2828 }
Andrei Popescu31002712010-02-23 13:46:05 +00002829}
2830
2831
2832#undef __
2833
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002834} // namespace internal
2835} // namespace v8
Andrei Popescu31002712010-02-23 13:46:05 +00002836
Leon Clarkef7060e22010-06-03 12:02:55 +01002837#endif // V8_TARGET_ARCH_MIPS