// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS64

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
  // ----------- S t a t e -------------
  // -- a0 : number of arguments excluding receiver
  // -- a1 : target
  // -- a3 : new.target
  // -- sp[0] : last argument
  // -- ...
  // -- sp[8 * (argc - 1)] : first argument
  // -- sp[8 * argc] : receiver
  // -----------------------------------
  __ AssertFunction(a1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // Insert extra arguments.
  const int num_extra_args = 2;
  __ Push(a1, a3);

  // JumpToExternalReference expects a0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Daddu(a0, a0, num_extra_args + 1);

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
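
// For illustration, the argument-count arithmetic above in JS terms: a call
// that reaches this adaptor with one JS argument (argc == 1) ends up with
// a0 == 1 + num_extra_args + 1 == 4, covering the argument, the receiver,
// and the two extra values (target and new.target) pushed above.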


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : number of arguments
  // -- ra : return address
  // -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, a4);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              a4, Operand(zero_reg));
    __ GetObjectType(a2, a3, a4);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              a4, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : number of arguments
  // -- ra : return address
  // -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, a4);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              a4, Operand(zero_reg));
    __ GetObjectType(a2, a3, a4);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              a4, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ mov(a3, a1);
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
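
// In JS terms, the stub above handles plain calls such as Array(3) or
// Array(1, 2, 3): the allocation-site slot (a2) is set to undefined, the
// new target (a3) to the Array function itself, and the tail-called
// ArrayConstructorStub dispatches on the argument count in a0.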


// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  // -- a0 : number of arguments
  // -- a1 : function
  // -- cp : context
  // -- ra : return address
  // -- sp[(argc - n) * 8] : arg[n] (zero-based)
  // -- sp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in t1 and the double value in f0.
  __ LoadRoot(t1, root_index);
  __ ldc1(f0, FieldMemOperand(t1, HeapNumber::kValueOffset));
  __ Addu(a3, a0, 1);

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ Dsubu(a0, a0, Operand(1));
    __ Branch(&done_loop, lt, a0, Operand(zero_reg));

    // Load the next parameter tagged value into a2.
    __ Dlsa(at, sp, a0, kPointerSizeLog2);
    __ ld(a2, MemOperand(at));

    // Load the double value of the parameter into f2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(a2, &convert_smi);
    __ ld(a4, FieldMemOperand(a2, HeapObject::kMapOffset));
    __ JumpIfRoot(a4, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ Push(ra, fp);
      __ Move(fp, sp);
      __ Push(cp, a1);
      __ SmiTag(a0);
      __ SmiTag(a3);
      __ Push(a0, t1, a3);
      __ mov(a0, a2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mov(a2, v0);
      __ Pop(a0, t1, a3);
      {
        // Restore the double accumulator value (f0).
        Label restore_smi, done_restore;
        __ JumpIfSmi(t1, &restore_smi);
        __ ldc1(f0, FieldMemOperand(t1, HeapNumber::kValueOffset));
        __ jmp(&done_restore);
        __ bind(&restore_smi);
        __ SmiToDoubleFPURegister(t1, f0, a4);
        __ bind(&done_restore);
      }
      __ SmiUntag(a3);
      __ SmiUntag(a0);
      __ Pop(cp, a1);
      __ Pop(ra, fp);
    }
    __ jmp(&convert);
    __ bind(&convert_number);
    __ ldc1(f2, FieldMemOperand(a2, HeapNumber::kValueOffset));
    __ jmp(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDoubleFPURegister(a2, f2, a4);
    __ bind(&done_convert);

    // Perform the actual comparison, using the Min/Max macro instructions,
    // with the accumulator value on the left-hand side (f0) and the next
    // parameter value on the right-hand side (f2).
    // We need to work out which HeapNumber (or smi) the result came from.
    Label compare_nan;
    __ BranchF(nullptr, &compare_nan, eq, f0, f2);
    __ Move(a4, f0);
    if (kind == MathMaxMinKind::kMin) {
      __ MinNaNCheck_d(f0, f0, f2);
    } else {
      DCHECK(kind == MathMaxMinKind::kMax);
      __ MaxNaNCheck_d(f0, f0, f2);
    }
    __ Move(at, f0);
    __ Branch(&loop, eq, a4, Operand(at));
    __ mov(t1, a2);
    __ jmp(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(t1, Heap::kNanValueRootIndex);
    __ ldc1(f0, FieldMemOperand(t1, HeapNumber::kValueOffset));
    __ jmp(&loop);
  }

  __ bind(&done_loop);
  __ Dlsa(sp, sp, a3, kPointerSizeLog2);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, t1);  // In delay slot.
}
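
// Observable semantics of the loop above, in JS terms:
//   Math.max()            // -> -Infinity (the default accumulator)
//   Math.min()            // -> +Infinity
//   Math.max(1, NaN, 2)   // -> NaN (via the compare_nan path)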

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : number of arguments
  // -- a1 : constructor function
  // -- ra : return address
  // -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  // -- sp[argc * 8] : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(a0, a0, Operand(1));
    __ Dlsa(sp, sp, a0, kPointerSizeLog2);
    __ ld(a0, MemOperand(sp));
    __ Drop(2);
  }

  // 2a. Convert first argument to number.
  __ Jump(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(v0, Smi::FromInt(0));
  __ DropAndRet(1);
}
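
// In JS terms: Number() returns +0 via path 2b, while Number(x) tail-calls
// the ToNumber builtin, so e.g. Number("42") yields 42.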


void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : number of arguments
  // -- a1 : constructor function
  // -- a3 : new target
  // -- ra : return address
  // -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  // -- sp[argc * 8] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(a0, a0, Operand(1));
    __ Dlsa(sp, sp, a0, kPointerSizeLog2);
    __ ld(a0, MemOperand(sp));
    __ Drop(2);
    __ jmp(&done);
    __ bind(&no_arguments);
    __ Move(a0, Smi::FromInt(0));
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure a0 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(a0, &done_convert);
    __ GetObjectType(a0, a2, a2);
    __ Branch(&done_convert, eq, a2, Operand(HEAP_NUMBER_TYPE));
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(a1, a3);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(a0, v0);
      __ Pop(a1, a3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(v0, a1, a0, a2, t0, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a0);
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(a0);
  }
  __ Ret(USE_DELAY_SLOT);
  __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset));  // In delay slot.
}
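
// In JS terms: new Number(42) produces a JSValue wrapper whose typeof is
// "object" rather than "number"; the primitive lives at JSValue::kValueOffset,
// filled in by the delay-slot store above.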


// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : number of arguments
  // -- a1 : constructor function
  // -- ra : return address
  // -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  // -- sp[argc * 8] : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(a0, a0, Operand(1));
    __ Dlsa(sp, sp, a0, kPointerSizeLog2);
    __ ld(a0, MemOperand(sp));
    __ Drop(2);
  }

  // 2a. At least one argument, return a0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(a0, &to_string);
    __ GetObjectType(a0, a1, a1);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ Subu(a1, a1, Operand(FIRST_NONSTRING_TYPE));
    __ Branch(&symbol_descriptive_string, eq, a1, Operand(zero_reg));
    __ Branch(&to_string, gt, a1, Operand(zero_reg));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(v0, Heap::kempty_stringRootIndex);
    __ DropAndRet(1);
  }

  // 3a. Convert a0 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in a0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(a0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}
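
// In JS terms: String() is "" (path 2b), String("x") returns the argument
// unchanged (path 2a), and String(Symbol("s")) takes the
// symbol_descriptive_string path, yielding "Symbol(s)".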


void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : number of arguments
  // -- a1 : constructor function
  // -- a3 : new target
  // -- ra : return address
  // -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  // -- sp[argc * 8] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(a0, a0, Operand(1));
    __ Dlsa(sp, sp, a0, kPointerSizeLog2);
    __ ld(a0, MemOperand(sp));
    __ Drop(2);
    __ jmp(&done);
    __ bind(&no_arguments);
    __ LoadRoot(a0, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure a0 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(a0, &convert);
    __ GetObjectType(a0, a2, a2);
    __ And(t0, a2, Operand(kIsNotStringMask));
    __ Branch(&done_convert, eq, t0, Operand(zero_reg));
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(a1, a3);
      __ CallStub(&stub);
      __ Move(a0, v0);
      __ Pop(a1, a3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(v0, a1, a0, a2, t0, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a0);
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(a0);
  }
  __ Ret(USE_DELAY_SLOT);
  __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset));  // In delay slot.
}
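
// As with the Number construct stub, new String("x") wraps the primitive in
// a JSValue (typeof "object") with the value stored at JSValue::kValueOffset.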

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ ld(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Daddu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  // -- a0 : argument count (preserved for callee)
  // -- a1 : target function (preserved for callee)
  // -- a3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the argument count, the target function and the new
    // target, plus the function again as the runtime argument.
    __ SmiTag(a0);
    __ Push(a0, a1, a3, a1);

    __ CallRuntime(function_id, 1);
    // Restore target function and new target.
    __ Pop(a0, a1, a3);
    __ SmiUntag(a0);
  }

  __ Daddu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
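
// Note that a0 is Smi-tagged before being pushed so that the argument count
// stays a valid tagged value on the stack while the runtime call (which may
// trigger a GC) is in progress; it is untagged again after the Pop.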


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(a4, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(a4));

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  // -- a0 : number of arguments
  // -- a1 : constructor function
  // -- a2 : allocation site or undefined
  // -- a3 : new target
  // -- cp : context
  // -- ra : return address
  // -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(a2, t0);
    __ SmiTag(a0);
    __ Push(cp, a2, a0);

    if (create_implicit_receiver) {
      __ Push(a1, a3);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(t0, v0);
      __ Pop(a1, a3);

      // ----------- S t a t e -------------
      // -- a1: constructor function
      // -- a3: new target
      // -- t0: newly allocated object
      // -----------------------------------
      __ ld(a0, MemOperand(sp));
    }
    __ SmiUntag(a0);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(t0, t0);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ Daddu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: new target
    // t0: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ mov(t0, a0);
    __ jmp(&entry);
    __ bind(&loop);
    __ Dlsa(a4, a2, t0, kPointerSizeLog2);
    __ ld(a5, MemOperand(a4));
    __ push(a5);
    __ bind(&entry);
    __ Daddu(t0, t0, Operand(-1));
    __ Branch(&loop, greater_equal, t0, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    // a3: new target
    ParameterCount actual(a0);
    __ InvokeFunction(a1, a3, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(v0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ GetObjectType(v0, a1, a3);
      __ Branch(&exit, greater_equal, a3, Operand(FIRST_JS_RECEIVER_TYPE));

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ ld(v0, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ ld(a1, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ ld(a1, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(v0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ SmiScale(a4, a1, kPointerSizeLog2);
  __ Daddu(sp, sp, a4);
  __ Daddu(sp, sp, kPointerSize);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  }
  __ Ret();
}
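
// The receiver-vs-result selection above, in JS terms:
//   function F() { this.x = 1; return {y: 2}; }  // new F() -> {y: 2}
//   function G() { this.x = 1; return 42; }      // new G() -> {x: 1}
// i.e. a non-object return value from a constructor is discarded in favour
// of the implicit receiver.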


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- v0 : the value to pass to the generator
  // -- a1 : the JSGeneratorObject to resume
  // -- a2 : the resume mode (tagged)
  // -- ra : return address
  // -----------------------------------
  __ AssertGeneratorObject(a1);

  // Store input value into generator object.
  __ sd(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3,
                      kRAHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ ld(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
  __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  __ li(a5, Operand(last_step_action));
  __ lb(a5, MemOperand(a5));
  __ Branch(&prepare_step_in_if_stepping, ge, a5, Operand(StepIn));

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ li(a5, Operand(debug_suspended_generator));
  __ ld(a5, MemOperand(a5));
  __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(a5));
  __ bind(&stepping_prepared);

  // Push receiver.
  __ ld(a5, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ Push(a5);

  // ----------- S t a t e -------------
  // -- a1 : the JSGeneratorObject to resume
  // -- a2 : the resume mode (tagged)
  // -- a4 : generator function
  // -- cp : generator context
  // -- ra : return address
  // -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label done_loop, loop;
    __ bind(&loop);
    __ Dsubu(a3, a3, Operand(1));
    __ Branch(&done_loop, lt, a3, Operand(zero_reg));
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
  __ ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset));
  __ GetObjectType(a3, a3, a3);
  __ Branch(&old_generator, ne, a3, Operand(BYTECODE_ARRAY_TYPE));

  // New-style (ignition/turbofan) generator object.
  {
    __ ld(a0, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a0,
          FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(a3, a1);
    __ Move(a1, a4);
    __ ld(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
    __ Jump(a2);
  }

  // Old-style (full-codegen) generator object
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(ra, fp);
    __ Move(fp, sp);
    __ Push(cp, a4);

    // Restore the operand stack.
    __ ld(a0, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
    __ ld(a3, FieldMemOperand(a0, FixedArray::kLengthOffset));
    __ SmiUntag(a3);
    __ Daddu(a0, a0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ Dlsa(a3, a0, a3, kPointerSizeLog2);
    {
      Label done_loop, loop;
      __ bind(&loop);
      __ Branch(&done_loop, eq, a0, Operand(a3));
      __ ld(a5, MemOperand(a0));
      __ Push(a5);
      __ Branch(USE_DELAY_SLOT, &loop);
      __ daddiu(a0, a0, kPointerSize);  // In delay slot.
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(a5, Heap::kEmptyFixedArrayRootIndex);
    __ sd(a5, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));

    // Resume the generator function at the continuation.
    __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
    __ ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset));
    __ Daddu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(a2);
    __ Daddu(a3, a3, Operand(a2));
    __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ Move(v0, a1);  // Continuation expects generator object in v0.
    __ Jump(a3);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a2, a4);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(a1, a2);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a2);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(a1, a2);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
}
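
// In JS terms, this trampoline performs the resumption half of e.g.
//   function* g() { const x = yield 1; return x; }
//   const it = g(); it.next(); it.next(42);  // 42 arrives here in v0
// with a2 carrying the resume mode (next, return or throw).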

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(a1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}


enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers a2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
  // Make a2 the space we have left. The stack might already be overflowed
  // here, which will cause a2 to become negative.
  __ dsubu(a2, sp, a2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiScale(a7, v0, kPointerSizeLog2);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ dsll(a7, argc, kPointerSizeLog2);
  }
  __ Branch(&okay, gt, a2, Operand(a7));  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
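
// Worked example: with an untagged argc of 4, a7 == 4 << kPointerSizeLog2
// == 32 bytes of required space; the signed branch reaches &okay whenever
// the remaining headroom in a2 exceeds that, while an already-overflowed
// stack (negative a2) falls through to kThrowStackOverflow.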


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  // -- a0: new.target
  // -- a1: function
  // -- a2: receiver_pointer
  // -- a3: argc
  // -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ li(cp, Operand(context_address));
    __ ld(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Check if we have enough stack space to push all arguments.
    // Clobbers a2.
    Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt);

    // Remember new.target.
    __ mov(a5, a0);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ Dlsa(a6, s0, a3, kPointerSizeLog2);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // a6 points past last arg.
    __ bind(&loop);
    __ ld(a4, MemOperand(s0));  // Read next parameter.
    __ daddiu(s0, s0, kPointerSize);
    __ ld(a4, MemOperand(a4));  // Dereference handle.
    __ push(a4);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(a6));

    // Setup new.target and argc.
    __ mov(a0, a3);
    __ mov(a3, a5);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(a4, Heap::kUndefinedValueRootIndex);
    __ mov(s1, a4);
    __ mov(s2, a4);
    __ mov(s3, a4);
    __ mov(s4, a4);
    __ mov(s5, a4);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ Jump(ra);
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ ld(args_count,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lw(args_count,
        FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments.
  __ Daddu(sp, sp, args_count);
}
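
// Note on the Daddu above: the parameter size read from the BytecodeArray is
// added directly to sp, so it is evidently a byte count, and per the comment
// above it already includes the receiver; a single add therefore drops the
// receiver and all arguments in one step.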

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o a1: the JS function object being called.
//   o a3: the new target
//   o cp: our context
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(a1);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ ld(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  Label load_debug_bytecode_array, bytecode_array_loaded;
  Register debug_info = kInterpreterBytecodeArrayRegister;
  DCHECK(!debug_info.is(a0));
  __ ld(debug_info, FieldMemOperand(a0, SharedFunctionInfo::kDebugInfoOffset));
  __ Branch(&load_debug_bytecode_array, ne, debug_info,
            Operand(DebugInfo::uninitialized()));
  __ ld(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  // Check function data field is actually a BytecodeArray object.
  Label bytecode_array_not_present;
  __ JumpIfRoot(kInterpreterBytecodeArrayRegister,
                Heap::kUndefinedValueRootIndex, &bytecode_array_not_present);
  if (FLAG_debug_code) {
    __ SmiTst(kInterpreterBytecodeArrayRegister, a4);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, a4, a4);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Load initial bytecode offset.
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(a4, kInterpreterBytecodeOffsetRegister);
  __ Push(a3, kInterpreterBytecodeArrayRegister, a4);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ Dsubu(a5, sp, Operand(a4));
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    __ Branch(&ok, hs, a5, Operand(a2));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
    __ Branch(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(a5);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ Dsubu(a4, a4, Operand(kPointerSize));
    __ Branch(&loop_header, ge, a4, Operand(zero_reg));
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ li(kInterpreterDispatchTableRegister,
        Operand(ExternalReference::interpreter_dispatch_table_address(
            masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ Daddu(a0, kInterpreterBytecodeArrayRegister,
           kInterpreterBytecodeOffsetRegister);
  __ lbu(a0, MemOperand(a0));
  __ Dlsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2);
  __ ld(at, MemOperand(at));
  __ Call(at);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in v0.
  LeaveInterpreterFrame(masm, t0);
  __ Jump(ra);

  // Load debug copy of the bytecode array.
  __ bind(&load_debug_bytecode_array);
  __ ld(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ Branch(&bytecode_array_loaded);

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ bind(&bytecode_array_not_present);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kCodeOffset));
  __ Daddu(a4, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ sd(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(a1, a4, a5);
  __ Jump(a4);
}

void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
  // Save the function and context for call to CompileBaseline.
  __ ld(a1, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ ld(kContextRegister,
        MemOperand(fp, StandardFrameConstants::kContextOffset));

  // Leave the frame before recompiling for baseline so that we don't count as
  // an activation on the stack.
  LeaveInterpreterFrame(masm, t0);

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Push return value.
    __ push(v0);

    // Push function as argument and compile for baseline.
    __ push(a1);
    __ CallRuntime(Runtime::kCompileBaseline);

    // Restore return value.
    __ pop(v0);
  }
  __ Jump(ra);
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  // -- a0 : the number of arguments (not including the receiver)
  // -- a2 : the address of the first argument to be pushed. Subsequent
  //         arguments should be consecutive above this, in the same order as
  //         they are to be pushed onto the stack.
  // -- a1 : the target to call (can be any Object).
  // -----------------------------------

  // Find the address of the last argument.
  __ Daddu(a3, a0, Operand(1));  // Add one for receiver.
  __ dsll(a3, a3, kPointerSizeLog2);
  __ Dsubu(a3, a2, Operand(a3));

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ ld(t0, MemOperand(a2));
  __ Daddu(a2, a2, Operand(-kPointerSize));
  __ push(t0);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(a3));

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}
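
// Worked example for the loop above: with argc == 2 and a2 pointing at the
// first value to push, a3 == a2 - 3 * kPointerSize, so three values
// (argc + 1, which covers the receiver) are pushed while a2 walks down
// towards a3.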

// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : argument count (not including receiver)
  // -- a3 : new target
  // -- a1 : constructor to call
  // -- a2 : address of the first argument
  // -----------------------------------

  // Find the address of the last argument.
  __ dsll(t0, a0, kPointerSizeLog2);
  __ Dsubu(t0, a2, Operand(t0));

  // Push a slot for the receiver.
  __ push(zero_reg);

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ ld(t1, MemOperand(a2));
  __ Daddu(a2, a2, Operand(-kPointerSize));
  __ push(t1);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(t0));

  // Call the constructor with a0, a1, and a3 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
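
// Unlike the call variant above, the receiver slot here is a bare push of
// zero_reg: the Construct builtin allocates the real receiver later, so only
// the argc argument values themselves are copied down from a2.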
1175
Ben Murdochc5610432016-08-08 18:44:38 +01001176void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1177 // Set the return address to the correct point in the interpreter entry
1178 // trampoline.
1179 Smi* interpreter_entry_return_pc_offset(
1180 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1181 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
1182 __ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));
1183 __ Daddu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() +
1184 Code::kHeaderSize - kHeapObjectTag));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001185
Ben Murdochc5610432016-08-08 18:44:38 +01001186 // Initialize the dispatch table register.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001187 __ li(kInterpreterDispatchTableRegister,
1188 Operand(ExternalReference::interpreter_dispatch_table_address(
1189 masm->isolate())));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001190
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001191 // Get the bytecode array pointer from the frame.
Ben Murdochc5610432016-08-08 18:44:38 +01001192 __ ld(kInterpreterBytecodeArrayRegister,
1193 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001194
1195 if (FLAG_debug_code) {
1196 // Check function data field is actually a BytecodeArray object.
1197 __ SmiTst(kInterpreterBytecodeArrayRegister, at);
1198 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at,
1199 Operand(zero_reg));
1200 __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
1201 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1,
1202 Operand(BYTECODE_ARRAY_TYPE));
1203 }
1204
1205 // Get the target bytecode offset from the frame.
1206 __ ld(kInterpreterBytecodeOffsetRegister,
Ben Murdochc5610432016-08-08 18:44:38 +01001207 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001208 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1209
1210 // Dispatch to the target bytecode.
1211 __ Daddu(a1, kInterpreterBytecodeArrayRegister,
1212 kInterpreterBytecodeOffsetRegister);
1213 __ lbu(a1, MemOperand(a1));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001214 __ Dlsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001215 __ ld(a1, MemOperand(a1));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001216 __ Jump(a1);
1217}
1218
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001219void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
Ben Murdochc5610432016-08-08 18:44:38 +01001220 // ----------- S t a t e -------------
1221 // -- a0 : argument count (preserved for callee)
1222 // -- a3 : new target (preserved for callee)
1223 // -- a1 : target function (preserved for callee)
1224 // -----------------------------------
1225 // First lookup code, maybe we don't need to compile!
1226 Label gotta_call_runtime, gotta_call_runtime_no_stack;
1227 Label maybe_call_runtime;
1228 Label try_shared;
1229 Label loop_top, loop_bottom;
1230
1231 Register argument_count = a0;
1232 Register closure = a1;
1233 Register new_target = a3;
1234 __ push(argument_count);
1235 __ push(new_target);
1236 __ push(closure);
1237
1238 Register map = a0;
1239 Register index = a2;
1240 __ ld(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1241 __ ld(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
1242 __ ld(index, FieldMemOperand(map, FixedArray::kLengthOffset));
1243 __ Branch(&gotta_call_runtime, lt, index, Operand(Smi::FromInt(2)));
1244
1245 // Find literals.
1246 // a3 : native context
1247 // a2 : length / index
1248 // a0 : optimized code map
1249 // stack[0] : new target
1250 // stack[4] : closure
1251 Register native_context = a3;
1252 __ ld(native_context, NativeContextMemOperand());
1253
  __ bind(&loop_top);
  Register temp = a1;
  Register array_pointer = a5;

  // Does the native context match?
  __ SmiScale(at, index, kPointerSizeLog2);
  __ Daddu(array_pointer, map, Operand(at));
  __ ld(temp, FieldMemOperand(array_pointer,
                              SharedFunctionInfo::kOffsetToPreviousContext));
  __ ld(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ Branch(&loop_bottom, ne, temp, Operand(native_context));
  // OSR id set to none?
  __ ld(temp, FieldMemOperand(array_pointer,
                              SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ Branch(&loop_bottom, ne, temp, Operand(Smi::FromInt(bailout_id)));

  // Literals available?
  Label got_literals, maybe_cleared_weakcell;
  __ ld(temp, FieldMemOperand(array_pointer,
                              SharedFunctionInfo::kOffsetToPreviousLiterals));
  // temp contains either a WeakCell pointing to the literals array or the
  // literals array directly.
  __ ld(a4, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(a4, &maybe_cleared_weakcell);
  // a4 is a pointer, therefore temp is a WeakCell pointing to a literals
  // array.
  __ ld(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ jmp(&got_literals);

  // a4 is a smi. If it's 0, then we are looking at a cleared WeakCell
  // around the literals array, and we should visit the runtime. If it's > 0,
  // then temp already contains the literals array.
  __ bind(&maybe_cleared_weakcell);
  __ Branch(&gotta_call_runtime, eq, a4, Operand(Smi::FromInt(0)));

  // Save the literals in the closure.
  __ bind(&got_literals);
  __ ld(a4, MemOperand(sp, 0));
  __ sd(temp, FieldMemOperand(a4, JSFunction::kLiteralsOffset));
  __ push(index);
  __ RecordWriteField(a4, JSFunction::kLiteralsOffset, temp, index,
                      kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ pop(index);

  // Code available?
  Register entry = a4;
  __ ld(entry,
        FieldMemOperand(array_pointer,
                        SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ ld(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &maybe_call_runtime);

  // Found literals and code. Get them into the closure and return.
  __ pop(closure);
  // Store code entry in the closure.
  __ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));

  Label install_optimized_code_and_tailcall;
  __ bind(&install_optimized_code_and_tailcall);
  __ sd(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, a5);

  // Link the closure into the optimized function list.
  // a4 : code entry
  // a3 : native context
  // a1 : closure
  __ ld(a5,
        ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ sd(a5, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, a5, a0,
                      kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ sd(closure,
        ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  // Save closure before the write barrier.
  __ mov(a5, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0,
                            kRAHasNotBeenSaved, kDontSaveFPRegs);
  __ mov(closure, a5);
  __ pop(new_target);
  __ pop(argument_count);
  __ Jump(entry);

  __ bind(&loop_bottom);
  __ Dsubu(index, index,
           Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
  __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1)));

  // We found neither literals nor code.
  __ jmp(&gotta_call_runtime);

  __ bind(&maybe_call_runtime);
  __ pop(closure);

  // Last possibility: check the context-free optimized code map entry.
  __ ld(entry, FieldMemOperand(map, FixedArray::kHeaderSize +
                                        SharedFunctionInfo::kSharedCodeIndex));
  __ ld(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Store code entry in the closure.
  __ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(&install_optimized_code_and_tailcall);

  __ bind(&try_shared);
  __ pop(new_target);
  __ pop(argument_count);
  // Is the full code valid?
  __ ld(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ ld(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ lw(a5, FieldMemOperand(entry, Code::kFlagsOffset));
  __ And(a5, a5, Operand(Code::KindField::kMask));
  __ dsrl(a5, a5, Code::KindField::kShift);
  __ Branch(&gotta_call_runtime_no_stack, eq, a5, Operand(Code::BUILTIN));
  // Yes, install the full code.
  __ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ sd(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, a5);
  __ Jump(entry);

  __ bind(&gotta_call_runtime);
  __ pop(closure);
  __ pop(new_target);
  __ pop(argument_count);
  __ bind(&gotta_call_runtime_no_stack);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}

void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Dsubu(a0, a0,
           Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  // a0 - contains return address (beginning of patch sequence)
  // a1 - isolate
  // a3 - new target
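  // (The runtime helper is called with two arguments below, so its C
  // signature is presumably along the lines of void(address, Isolate*).)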
  RegList saved_regs =
      (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  __ Jump(a0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Dsubu(a0, a0,
           Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  // a0 - contains return address (beginning of patch sequence)
  // a1 - isolate
  // a3 - new target
  RegList saved_regs =
      (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);

  // Perform prologue operations usually performed by the young code stub.
  __ PushStandardFrame(a1);

  // Jump to point after the code-age stub.
  __ Daddu(a0, a0, Operand(kNoCodeAgeSequenceLength));
  __ Jump(a0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across the notification; this is important for
    // compiled stubs that tail call the runtime on deopts, passing their
    // parameters in registers.
1493 __ MultiPush(kJSCallerSaved | kCalleeSaved);
1494 // Pass the function and deoptimization type to the runtime system.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001495 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001496 __ MultiPop(kJSCallerSaved | kCalleeSaved);
1497 }
1498
1499 __ Daddu(sp, sp, Operand(kPointerSize)); // Ignore state
1500 __ Jump(ra); // Jump to miss handler
1501}
1502
1503
1504void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1505 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1506}
1507
1508
1509void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1510 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1511}
1512
1513
1514static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1515 Deoptimizer::BailoutType type) {
1516 {
1517 FrameScope scope(masm, StackFrame::INTERNAL);
1518 // Pass the function and deoptimization type to the runtime system.
1519 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
1520 __ push(a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001521 __ CallRuntime(Runtime::kNotifyDeoptimized);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001522 }
1523
1524 // Get the full codegen state from the stack and untag it -> a6.
1525 __ ld(a6, MemOperand(sp, 0 * kPointerSize));
1526 __ SmiUntag(a6);
1527 // Switch on the state.
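  // NO_REGISTERS: only the state Smi is on the stack, so one slot is dropped.
  // TOS_REGISTER: the accumulator value sits in the slot above the state Smi
  // and must be restored into v0 before dropping two slots.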
  Label with_tos_register, unknown_state;
  __ Branch(
      &with_tos_register, ne, a6,
      Operand(static_cast<int64_t>(Deoptimizer::BailoutState::NO_REGISTERS)));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill the delay slot: Daddu emits a single instruction.
  __ Daddu(sp, sp, Operand(1 * kPointerSize));  // Remove state.

  __ bind(&with_tos_register);
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code());
  __ ld(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(
      &unknown_state, ne, a6,
      Operand(static_cast<int64_t>(Deoptimizer::BailoutState::TOS_REGISTER)));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill the delay slot: Daddu emits a single instruction.
  __ Daddu(sp, sp, Operand(2 * kPointerSize));  // Remove state.

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


// Clobbers {t2, t3, a4, a5}.
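//
// Roughly, in pseudocode (field names here are informal):
//
//   if (info->signature == undefined) return;  // check passed
//   for (map = receiver->map; ; map = map->prototype->map) {
//     ctor = GetMapConstructor(map);
//     if (ctor is a JSFunction) {
//       for (type = ctor->shared->function_data;;) {
//         if (type == signature) return;       // check passed
//         if (type is not a FunctionTemplateInfo) break;
//         type = type->parent_template;
//       }
//     }
//     if (!map->has_hidden_prototype) goto receiver_check_failed;
//   }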
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Label* receiver_check_failed) {
  Register signature = t2;
  Register map = t3;
  Register constructor = a4;
  Register scratch = a5;

  // If there is no signature, return the holder.
  __ ld(signature, FieldMemOperand(function_template_info,
                                   FunctionTemplateInfo::kSignatureOffset));
  Label receiver_check_passed;
  __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
                &receiver_check_passed);

  // Walk the prototype chain.
  __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, scratch, scratch);
  Label next_prototype;
  __ Branch(&next_prototype, ne, scratch, Operand(JS_FUNCTION_TYPE));
  Register type = constructor;
  __ ld(type,
        FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ ld(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ Branch(&receiver_check_passed, eq, signature, Operand(type),
            USE_DELAY_SLOT);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ GetObjectType(type, scratch, scratch);
  __ Branch(&next_prototype, ne, scratch,
            Operand(FUNCTION_TEMPLATE_INFO_TYPE));

  // Otherwise load the parent function template and iterate.
  __ ld(type,
        FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ Branch(&function_template_loop);

  // Load the next prototype.
  __ bind(&next_prototype);
  __ lwu(scratch, FieldMemOperand(map, Map::kBitField3Offset));
  __ DecodeField<Map::HasHiddenPrototype>(scratch);
  __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg));

  __ ld(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ Branch(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}


void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : number of arguments excluding receiver
  //  -- a1 : callee
  //  -- ra : return address
  //  -- sp[0] : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument
  //  -- sp[8 * argc] : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ ld(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ ld(t1, FieldMemOperand(t1, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ Dlsa(t8, sp, a0, kPointerSizeLog2);
  __ ld(t0, MemOperand(t8));
  CompatibleReceiverCheck(masm, t0, t1, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ ld(t2, FieldMemOperand(t1, FunctionTemplateInfo::kCallCodeOffset));
  __ ld(t2, FieldMemOperand(t2, CallHandlerInfo::kFastHandlerOffset));
  __ Daddu(t2, t2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(t2);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver).
  __ Daddu(t8, t8, Operand(kPointerSize));
  __ daddu(sp, t8, zero_reg);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ ld(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
                               DeoptimizationInputData::kOsrPcOffsetIndex) -
                               kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ daddu(v0, v0, a1);
  __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}


// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- a0    : number of arguments
  //  -- a1    : function
  //  -- cp    : context
  //  -- sp[0] : receiver
  // -----------------------------------

  // 1. Pop receiver into a0 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(a0);
    __ JumpIfSmi(a0, &receiver_not_date);
    __ GetObjectType(a0, t0, t0);
    __ Branch(&receiver_not_date, ne, t0, Operand(JS_DATE_TYPE));
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
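  // A cached field is only valid while the isolate-wide date cache stamp
  // matches the stamp stored in the JSDate object; on a mismatch the value
  // is recomputed by the C++ date field function.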
  if (field_index == JSDate::kDateValue) {
    __ Ret(USE_DELAY_SLOT);
    __ ld(v0, FieldMemOperand(a0, JSDate::kValueOffset));  // In delay slot.
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      Label stamp_mismatch;
      __ li(a1, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
      __ ld(a1, MemOperand(a1));
      __ ld(t0, FieldMemOperand(a0, JSDate::kCacheStampOffset));
      __ Branch(&stamp_mismatch, ne, t0, Operand(a1));
      __ Ret(USE_DELAY_SLOT);
      __ ld(v0, FieldMemOperand(
                    a0, JSDate::kValueOffset +
                            field_index * kPointerSize));  // In delay slot.
      __ bind(&stamp_mismatch);
    }
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, t0);
    __ li(a1, Operand(Smi::FromInt(field_index)));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(a0, ra, fp);
    __ Move(fp, sp);
    __ Push(cp, a1);
    __ Push(Smi::FromInt(0));
    __ CallRuntime(Runtime::kThrowNotDateError);
  }
}

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : argArray
  //  -- sp[8]  : thisArg
  //  -- sp[16] : receiver
  // -----------------------------------

  // 1. Load receiver into a1, argArray into a0 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
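  // For example, with argc == 2 the incoming stack is
  //   sp[0]: argArray, sp[8]: thisArg, sp[16]: receiver
  // and afterwards a1 holds the receiver, a0 holds argArray, and only
  // thisArg remains on the stack.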
  {
    Label no_arg;
    Register scratch = a4;
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ mov(a3, a2);
    // Dlsa() cannot be used here, as the scratch value is used later.
    __ dsll(scratch, a0, kPointerSizeLog2);
    __ Daddu(a0, sp, Operand(scratch));
    __ ld(a1, MemOperand(a0));  // receiver
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ ld(a2, MemOperand(a0));  // thisArg
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ ld(a3, MemOperand(a0));  // argArray
    __ bind(&no_arg);
    __ Daddu(sp, sp, Operand(scratch));
    __ sd(a2, MemOperand(sp));
    __ mov(a0, a3);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argArray
  //  -- a1    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(a1, &receiver_not_callable);
  __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
  __ And(a4, a4, Operand(1 << Map::kIsCallable));
  __ Branch(&receiver_not_callable, eq, a4, Operand(zero_reg));

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ mov(a0, zero_reg);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ sd(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}


// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  {
    Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ Daddu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack.
  // a0: actual number of arguments
  __ Dlsa(at, sp, a0, kPointerSizeLog2);
  __ ld(a1, MemOperand(at));

  // 3. Shift arguments and return address one slot down on the stack
  // (overwriting the original receiver). Adjust argument count to make
  // the original first argument the new receiver.
  // a0: actual number of arguments
  // a1: function
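  // E.g. for f.call(a, b) the stack holds {b, a, f} (from sp upwards); the
  // shift turns it into {b, a} with a as the new receiver, a1 == f and a0
  // reduced from 2 to 1.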
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ Dlsa(a2, sp, a0, kPointerSizeLog2);

    __ bind(&loop);
    __ ld(at, MemOperand(a2, -kPointerSize));
    __ sd(at, MemOperand(a2));
    __ Dsubu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Dsubu(a0, a0, Operand(1));
    __ Pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[8]  : thisArgument
  //  -- sp[16] : target
  //  -- sp[24] : receiver
  // -----------------------------------

  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label no_arg;
    Register scratch = a4;
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ mov(a2, a1);
    __ mov(a3, a1);
    __ dsll(scratch, a0, kPointerSizeLog2);
    __ mov(a0, scratch);
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(zero_reg));
    __ Daddu(a0, sp, Operand(a0));
    __ ld(a1, MemOperand(a0));  // target
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ ld(a2, MemOperand(a0));  // thisArgument
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ ld(a3, MemOperand(a0));  // argumentsList
    __ bind(&no_arg);
    __ Daddu(sp, sp, Operand(scratch));
    __ sd(a2, MemOperand(sp));
    __ mov(a0, a3);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a1    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(a1, &target_not_callable);
  __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
  __ And(a4, a4, Operand(1 << Map::kIsCallable));
  __ Branch(&target_not_callable, eq, a4, Operand(zero_reg));

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ sd(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[8]  : argumentsList
  //  -- sp[16] : target
  //  -- sp[24] : receiver
  // -----------------------------------

  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
  // new.target into a3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label no_arg;
    Register scratch = a4;
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ mov(a2, a1);
    // Dlsa() cannot be used here, as the scratch value is used later.
    __ dsll(scratch, a0, kPointerSizeLog2);
    __ Daddu(a0, sp, Operand(scratch));
    __ sd(a2, MemOperand(a0));  // receiver
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ ld(a1, MemOperand(a0));  // target
    __ mov(a3, a1);             // new.target defaults to target
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ ld(a2, MemOperand(a0));  // argumentsList
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ ld(a3, MemOperand(a0));  // new.target
    __ bind(&no_arg);
    __ Daddu(sp, sp, Operand(scratch));
    __ mov(a0, a2);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a3    : new.target
  //  -- a1    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(a1, &target_not_constructor);
  __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
  __ And(a4, a4, Operand(1 << Map::kIsConstructor));
  __ Branch(&target_not_constructor, eq, a4, Operand(zero_reg));

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(a3, &new_target_not_constructor);
  __ ld(a4, FieldMemOperand(a3, HeapObject::kMapOffset));
  __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
  __ And(a4, a4, Operand(1 << Map::kIsConstructor));
  __ Branch(&new_target_not_constructor, eq, a4, Operand(zero_reg));

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ sd(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ sd(a3, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- a0 : actual number of arguments
  //  -- a1 : function (passed through to callee)
  //  -- a2 : expected number of arguments
  //  -- a3 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
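  // That is, we bail out to stack_overflow iff
  //   sp - real_stack_limit <= expected_arguments * kPointerSize,
  // using a signed comparison since sp may already be below the limit.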
  __ LoadRoot(a5, Heap::kRealStackLimitRootIndex);
  // Make a5 the space we have left. The stack might already be overflowed
  // here which will cause a5 to become negative.
  __ dsubu(a5, sp, a5);
  // Check if the arguments will overflow the stack.
  __ dsll(at, a2, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, a5, Operand(at));
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  // On 64-bit targets a Smi is the value shifted left by 32 bits, so the
  // 32-bit port's sll(a0, a0, kSmiTagSize) becomes a dsll32 here.
  __ dsll32(a0, a0, 0);
  __ li(a4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit());
  __ Daddu(fp, sp,
           Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                   kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ld(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                             kPointerSize)));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  __ SmiScale(a4, a1, kPointerSizeLog2);
  __ Daddu(sp, sp, a4);
  // Adjust for the receiver.
  __ Daddu(sp, sp, Operand(kPointerSize));
}


// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a1    : target
  //  -- a3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
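  // Three paths are attempted below: unmodified sloppy and strict arguments
  // objects and fast JSArrays are unpacked inline; everything else falls back
  // to Runtime::kCreateListFromArrayLike.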
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(a0, &create_runtime);

    // Load the map of argumentsList into a2.
    __ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset));

    // Load native context into a4.
    __ ld(a4, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ ld(at, ContextMemOperand(a4, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ Branch(&create_arguments, eq, a2, Operand(at));
    __ ld(at, ContextMemOperand(a4, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ Branch(&create_arguments, eq, a2, Operand(at));

    // Check if argumentsList is a fast JSArray.
    __ ld(v0, FieldMemOperand(a2, HeapObject::kMapOffset));
    __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE));

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(a1, a3, a0);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ mov(a0, v0);
      __ Pop(a1, a3);
      __ ld(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
      __ SmiUntag(a2);
    }
    __ Branch(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ ld(a2, FieldMemOperand(a0, JSArgumentsObject::kLengthOffset));
    __ ld(a4, FieldMemOperand(a0, JSObject::kElementsOffset));
    __ ld(at, FieldMemOperand(a4, FixedArray::kLengthOffset));
    __ Branch(&create_runtime, ne, a2, Operand(at));
    __ SmiUntag(a2);
    __ mov(a0, a4);
    __ Branch(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ ld(a2, FieldMemOperand(a2, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(a2);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS));
    __ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ ld(a2, FieldMemOperand(a0, JSArray::kLengthOffset));
    __ ld(a0, FieldMemOperand(a0, JSArray::kElementsOffset));
    __ SmiUntag(a2);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(a4, Heap::kRealStackLimitRootIndex);
    // Make a4 the space we have left. The stack might already be overflowed
    // here, which will cause a4 to become negative.
    __ Dsubu(a4, sp, a4);
    // Check if the arguments will overflow the stack.
    __ dsll(at, a2, kPointerSizeLog2);
    __ Branch(&done, gt, a4, Operand(at));  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- a1    : target
  //  -- a0    : args (a FixedArray built from argumentsList)
  //  -- a2    : len (number of elements to push from args)
  //  -- a3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    __ mov(a4, zero_reg);
    Label done, loop;
    __ bind(&loop);
    __ Branch(&done, eq, a4, Operand(a2));
    __ Dlsa(at, a0, a4, kPointerSizeLog2);
    __ ld(at, FieldMemOperand(at, FixedArray::kHeaderSize));
    __ Push(at);
    __ Daddu(a4, a4, Operand(1));
    __ Branch(&loop);
    __ bind(&done);
    __ Move(a0, a4);
  }

  // Dispatch to Call or Construct depending on whether new.target is
  // undefined.
  {
    Label construct;
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(&construct, ne, a3, Operand(at));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
    __ bind(&construct);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}

namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ li(at, Operand(is_tail_call_elimination_enabled));
  __ lb(scratch1, MemOperand(at));
  __ Branch(&done, eq, scratch1, Operand(zero_reg));

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ ld(scratch3,
          MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ Branch(&no_interpreter_frame, ne, scratch3,
              Operand(Smi::FromInt(StackFrame::STUB)));
    __ ld(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ ld(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ld(scratch3,
        MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Branch(&no_arguments_adaptor, ne, scratch3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mov(fp, scratch2);
  __ ld(caller_args_count_reg,
        MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ Branch(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count.
  __ ld(scratch1,
        MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  __ ld(scratch1,
        FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(caller_args_count_reg,
        FieldMemOperand(scratch1,
                        SharedFunctionInfo::kFormalParameterCountOffset));

  __ bind(&formal_parameter_count_loaded);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(a1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that function is not a "classConstructor".
  Label class_constructor;
  __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset));
  __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ Branch(&class_constructor, ne, at, Operand(zero_reg));

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
  __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                         (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ Branch(&done_convert, ne, at, Operand(zero_reg));
  {
    // ----------- S t a t e -------------
    //  -- a0 : the number of arguments (not including the receiver)
    //  -- a1 : the function to call (checked to be a JSFunction)
    //  -- a2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(a3);
    } else {
      Label convert_to_object, convert_receiver;
      __ Dlsa(at, sp, a0, kPointerSizeLog2);
      __ ld(a3, MemOperand(at));
      __ JumpIfSmi(a3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ GetObjectType(a3, a4, a4);
      __ Branch(&done_convert, hs, a4, Operand(FIRST_JS_RECEIVER_TYPE));
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(a3);
        }
        __ Branch(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(a0);
        __ Push(a0, a1);
        __ mov(a0, a3);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ mov(a3, v0);
        __ Pop(a0, a1);
        __ SmiUntag(a0);
      }
      __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ Dlsa(at, sp, a0, kPointerSizeLog2);
    __ sd(a3, MemOperand(at));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  //  -- a2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  __ lw(a2,
        FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(a0);
  ParameterCount expected(a2);
  __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}


// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  // Patch the receiver to [[BoundThis]].
  {
    __ ld(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
    __ Dlsa(a4, sp, a0, kPointerSizeLog2);
    __ sd(at, MemOperand(a4));
  }

  // Load [[BoundArguments]] into a2 and length of that into a4.
  __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(a4);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- a4 : the number of [[BoundArguments]]
  // -----------------------------------
  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ dsll(a5, a4, kPointerSizeLog2);
    __ Dsubu(sp, sp, Operand(a5));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Daddu(sp, sp, Operand(a5));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(a5, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, gt, a5, Operand(a0));
    __ Dlsa(a6, sp, a4, kPointerSizeLog2);
    __ ld(at, MemOperand(a6));
    __ Dlsa(a6, sp, a5, kPointerSizeLog2);
    __ sd(at, MemOperand(a6));
    __ Daddu(a4, a4, Operand(1));
    __ Daddu(a5, a5, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(a4);
    __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Dsubu(a4, a4, Operand(1));
    __ Branch(&done_loop, lt, a4, Operand(zero_reg));
    __ Dlsa(a5, a2, a4, kPointerSizeLog2);
    __ ld(at, MemOperand(a5));
    __ Dlsa(a5, sp, a0, kPointerSizeLog2);
    __ sd(at, MemOperand(a5));
    __ Daddu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                      masm->isolate())));
  __ ld(at, MemOperand(at));
  __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------
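  // Dispatch order: JSFunctions and bound functions are handled by their own
  // builtins, JSProxies fall through to the runtime, and any other object
  // with a [[Call]] internal method is routed through the
  // CALL_AS_FUNCTION_DELEGATE; everything else throws a TypeError.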

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(a1, &non_callable);
  __ bind(&non_smi);
  __ GetObjectType(a1, t1, t2);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Check if target has a [[Call]] internal method.
  __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t1, t1, Operand(1 << Map::kIsCallable));
  __ Branch(&non_callable, eq, t1, Operand(zero_reg));

  __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE));

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(a1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ Daddu(a0, a0, 2);
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ Dlsa(at, sp, a0, kPointerSizeLog2);
  __ sd(a1, MemOperand(at));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}


void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(a1);

  // The calling convention for function-specific ConstructStubs requires
  // a2 to contain either an AllocationSite or undefined.
2553 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2554
2555 // Tail call to the function-specific construct stub (still in the caller
2556 // context at this point).
2557 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2558 __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset));
2559 __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
2560 __ Jump(at);
2561}


// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  // Load [[BoundArguments]] into a2 and length of that into a4.
  __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(a4);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- a3 : the new target (checked to be a constructor)
  //  -- a4 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ dsll(a5, a4, kPointerSizeLog2);
    __ Dsubu(sp, sp, Operand(a5));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Daddu(sp, sp, Operand(a5));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }
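
  // The two loops below first slide the existing arguments (receiver
  // included) down into the just-reserved space and then copy the
  // [[BoundArguments]] into the gap that opens up above them. Dlsa forms the
  // slot addresses: Dlsa(rd, base, index, sa) computes rd = base + (index <<
  // sa), i.e. the address of the index'th pointer-sized stack slot.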

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(a5, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, ge, a5, Operand(a0));
    __ Dlsa(a6, sp, a4, kPointerSizeLog2);
    __ ld(at, MemOperand(a6));
    __ Dlsa(a6, sp, a5, kPointerSizeLog2);
    __ sd(at, MemOperand(a6));
    __ Daddu(a4, a4, Operand(1));
    __ Daddu(a5, a5, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(a4);
    __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Dsubu(a4, a4, Operand(1));
    __ Branch(&done_loop, lt, a4, Operand(zero_reg));
    __ Dlsa(a5, a2, a4, kPointerSizeLog2);
    __ ld(at, MemOperand(a5));
    __ Dlsa(a5, sp, a0, kPointerSizeLog2);
    __ sd(at, MemOperand(a5));
    __ Daddu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label skip_load;
    __ Branch(&skip_load, ne, a1, Operand(a3));
    __ ld(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&skip_load);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ ld(at, MemOperand(at));
  __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
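
// Example: given
//   function F(a, b, c) { this.sum = a + b + c; }
//   var B = F.bind(null, 1, 2);
//   new B(3);
// the code above pushes the bound arguments (1, 2) in front of (3), patches
// new.target from B to F, and re-enters the Construct builtin, so the result
// is constructed as if by new F(1, 2, 3).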


// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSProxy)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(a1, a3);
  // Include the pushed new_target, constructor and the receiver.
  __ Daddu(a0, a0, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
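
// Example: for
//   var p = new Proxy(function() {}, { construct() { return { x: 1 }; } });
//   new p();
// Runtime::kJSProxyConstruct invokes the "construct" trap (or falls through
// to the target's [[Construct]] when no trap is installed).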


// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (can be any Object)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(a1, &non_constructor);

  // Dispatch based on instance type.
  __ ld(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));

  // Check if target has a [[Construct]] internal method.
  __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t3, t3, Operand(1 << Map::kIsConstructor));
  __ Branch(&non_constructor, eq, t3, Operand(zero_reg));

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Only dispatch to proxies after checking whether they are constructors.
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq, t2, Operand(JS_PROXY_TYPE));

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ Dlsa(at, sp, a0, kPointerSizeLog2);
    __ sd(a1, MemOperand(at));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
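
// Example: `new Math.max()` lands in ConstructedNonConstructable above, since
// Math.max is callable but carries no [[Construct]] internal method.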

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : requested object size (untagged)
  //  -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Push(a0);
  __ Move(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : requested object size (untagged)
  //  -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(a0, a1);
  __ Move(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
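
// Note: both allocation stubs Smi-tag the requested size so the runtime
// receives a tagged value, and load a dummy Smi zero into cp, presumably
// because no JavaScript context is required for these runtime calls.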

// static
void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
  // The StringToNumber stub takes one argument in a0.
  __ AssertString(a0);

  // Check if string has a cached array index.
  Label runtime;
  __ lwu(a2, FieldMemOperand(a0, String::kHashFieldOffset));
  __ And(at, a2, Operand(String::kContainsCachedArrayIndexMask));
  __ Branch(&runtime, ne, at, Operand(zero_reg));
  __ IndexFromHash(a2, v0);
  __ Ret();

  __ bind(&runtime);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ Push(a0);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kStringToNumber);
  }
  __ Ret();
}
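
// Example: short array-index strings such as "7" or "123" cache their numeric
// value in the string's hash field, so IndexFromHash above can materialize
// the Smi directly and skip the runtime call.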

// static
void Builtins::Generate_ToNumber(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in a0.
  Label not_smi;
  __ JumpIfNotSmi(a0, &not_smi);
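  // The mov below executes in the branch delay slot of Ret (USE_DELAY_SLOT),
  // so a Smi input is returned in v0 unchanged.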
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
  __ bind(&not_smi);

  Label not_heap_number;
  __ GetObjectType(a0, a1, a1);
  // a0: receiver
  // a1: receiver instance type
  __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
  __ bind(&not_heap_number);

  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
          RelocInfo::CODE_TARGET);
}
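
// Note: taken together, ToNumber handles Smis and HeapNumbers inline and
// defers everything else to NonNumberToNumber, which in turn peels off
// strings (via StringToNumber) and oddballs before falling back to
// Runtime::kToNumber.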

// static
void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
  // The NonNumberToNumber stub takes one argument in a0.
  __ AssertNotNumber(a0);

  Label not_string;
  __ GetObjectType(a0, a1, a1);
  // a0: receiver
  // a1: receiver instance type
  __ Branch(&not_string, hs, a1, Operand(FIRST_NONSTRING_TYPE));
  __ Jump(masm->isolate()->builtins()->StringToNumber(),
          RelocInfo::CODE_TARGET);
  __ bind(&not_string);

  Label not_oddball;
  __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
  __ Ret(USE_DELAY_SLOT);
  __ ld(v0, FieldMemOperand(a0, Oddball::kToNumberOffset));  // In delay slot.
  __ bind(&not_oddball);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ Push(a0);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kToNumber);
  }
  __ Ret();
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  //  -- a3: new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Branch(&dont_adapt_arguments, eq,
            a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use an unsigned (Uless) comparison since the number of arguments is
  // never negative.
  __ Branch(&too_few, Uless, a0, Operand(a2));
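
  // Example: for a callee declared as function f(x, y, z) {} but invoked as
  // f(1, 2), the actual count (2) is below the expected count (3), so the
  // &too_few path copies the receiver and both arguments into the adaptor
  // frame and fills the missing slot for z with undefined.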

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into a4.
    __ SmiScale(a0, a0, kPointerSizeLog2);
    __ Daddu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Daddu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ dsll(a4, a2, kPointerSizeLog2);
    __ dsubu(a4, a0, a4);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // a4: copy end address

    Label copy;
    __ bind(&copy);
    __ ld(a5, MemOperand(a0));
    __ push(a5);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a4));
    __ daddiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into a7.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ SmiScale(a0, a0, kPointerSizeLog2);
    __ Daddu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Daddu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Daddu(a7, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // a7: copy end address
    Label copy;
    __ bind(&copy);
    __ ld(a4, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Dsubu(sp, sp, kPointerSize);
    __ Dsubu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a7));
    __ sd(a4, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
    __ dsll(a6, a2, kPointerSizeLog2);
    __ Dsubu(a4, fp, Operand(a6));
    // Adjust for frame.
    __ Dsubu(a4, a4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                             2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ Dsubu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a4));
    __ sd(a5, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mov(a0, a2);
  // a0 : expected number of arguments
  // a1 : function (passed through to callee)
  // a3 : new target (passed through to callee)
  __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Call(a4);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Jump(a4);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ break_(0xCC);
  }
}


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64