// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"


namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
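
// Each "__ Op(...)" statement below expands to "masm->Op(...)" via the
// ACCESS_MASM macro; the Generate_* functions emit raw MIPS machine code
// into the given MacroAssembler.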

void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : target
  //  -- a3                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(a1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // Insert extra arguments.
  const int num_extra_args = 2;
  __ Push(a1, a3);

  // JumpToExternalReference expects a0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Addu(a0, a0, num_extra_args + 1);
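  // For example, a call that enters with a0 == 2 (two JS arguments) leaves
  // with a0 == 5: the two arguments, the receiver, and the two extra
  // arguments (target and new.target) pushed above.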

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ mov(a3, a1);
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : function
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in t2 and the double value in f0.
  __ LoadRoot(t2, root_index);
  __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset));
  __ Addu(a3, a0, Operand(1));
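  // a3 now holds argc + 1, the total number of stack slots (arguments plus
  // receiver) that will be dropped from sp after the loop below finishes.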

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ Subu(a0, a0, Operand(1));
    __ Branch(&done_loop, lt, a0, Operand(zero_reg));

    // Load the next parameter tagged value into a2.
    __ Lsa(at, sp, a0, kPointerSizeLog2);
    __ lw(a2, MemOperand(at));

    // Load the double value of the parameter into f2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(a2, &convert_smi);
    __ lw(t0, FieldMemOperand(a2, HeapObject::kMapOffset));
    __ JumpIfRoot(t0, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ Push(ra, fp);
      __ Move(fp, sp);
      __ Push(cp, a1);
      __ SmiTag(a0);
      __ SmiTag(a3);
      __ Push(a0, t2, a3);
      __ mov(a0, a2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mov(a2, v0);
      __ Pop(a0, t2, a3);
      {
        // Restore the double accumulator value (f0).
        Label restore_smi, done_restore;
        __ JumpIfSmi(t2, &restore_smi);
        __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset));
        __ jmp(&done_restore);
        __ bind(&restore_smi);
        __ SmiToDoubleFPURegister(t2, f0, t0);
        __ bind(&done_restore);
      }
      __ SmiUntag(a3);
      __ SmiUntag(a0);
      __ Pop(cp, a1);
      __ Pop(ra, fp);
    }
    __ jmp(&convert);
    __ bind(&convert_number);
    __ ldc1(f2, FieldMemOperand(a2, HeapNumber::kValueOffset));
    __ jmp(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDoubleFPURegister(a2, f2, t0);
    __ bind(&done_convert);

    // Perform the actual comparison using the Min/Max macro instructions,
    // with the accumulator value on the left hand side (f0) and the next
    // parameter value on the right hand side (f2). Afterwards we need to
    // work out which HeapNumber (or smi) the result came from.
    Label compare_nan, set_value;
    __ BranchF(nullptr, &compare_nan, eq, f0, f2);
    __ Move(t0, t1, f0);
    if (kind == MathMaxMinKind::kMin) {
      __ MinNaNCheck_d(f0, f0, f2);
    } else {
      DCHECK(kind == MathMaxMinKind::kMax);
      __ MaxNaNCheck_d(f0, f0, f2);
    }
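    // The NaN-checking Min/Max macros leave the result in f0. Compare its
    // raw 64-bit bit pattern (moved to at/t8 below) against the old
    // accumulator bits saved in t0/t1: if they differ, the new parameter
    // produced the result, so the tagged accumulator in t2 must be updated.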
213 __ Move(at, t8, f0);
214 __ Branch(&set_value, ne, t0, Operand(at));
215 __ Branch(&set_value, ne, t1, Operand(t8));
216 __ jmp(&loop);
217 __ bind(&set_value);
Ben Murdoch61f157c2016-09-16 13:49:30 +0100218 __ mov(t2, a2);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100219 __ jmp(&loop);
220
221 // At least one side is NaN, which means that the result will be NaN too.
222 __ bind(&compare_nan);
Ben Murdoch61f157c2016-09-16 13:49:30 +0100223 __ LoadRoot(t2, Heap::kNanValueRootIndex);
224 __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100225 __ jmp(&loop);
226 }
227
228 __ bind(&done_loop);
229 __ Lsa(sp, sp, a3, kPointerSizeLog2);
Ben Murdochda12d292016-06-02 14:46:10 +0100230 __ Ret(USE_DELAY_SLOT);
Ben Murdoch61f157c2016-09-16 13:49:30 +0100231 __ mov(v0, t2); // In delay slot.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100232}
233
234// static
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000235void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000236 // ----------- S t a t e -------------
237 // -- a0 : number of arguments
238 // -- a1 : constructor function
239 // -- ra : return address
240 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
241 // -- sp[argc * 4] : receiver
242 // -----------------------------------
Ben Murdoch257744e2011-11-30 15:57:28 +0000243
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000244 // 1. Load the first argument into a0 and get rid of the rest (including the
245 // receiver).
246 Label no_arguments;
247 {
248 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
249 __ Subu(a0, a0, Operand(1));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100250 __ Lsa(sp, sp, a0, kPointerSizeLog2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000251 __ lw(a0, MemOperand(sp));
252 __ Drop(2);
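    // Note how the first argument is reached: after the Subu above, a0 holds
    // argc - 1, so the Lsa advances sp by (argc - 1) * kPointerSize, leaving
    // sp at the first argument; Drop(2) then removes that argument and the
    // receiver.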
  }

  // 2a. Convert first argument to number.
  __ Jump(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(v0, Smi::FromInt(0));
  __ DropAndRet(1);
}


// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0 and get rid of the rest (including the
  //    receiver).
  {
    Label no_arguments, done;
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(a0, a0, Operand(1));
    __ Lsa(sp, sp, a0, kPointerSizeLog2);
    __ lw(a0, MemOperand(sp));
    __ Drop(2);
    __ jmp(&done);
    __ bind(&no_arguments);
    __ Move(a0, Smi::FromInt(0));
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure a0 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(a0, &done_convert);
    __ GetObjectType(a0, a2, a2);
    __ Branch(&done_convert, eq, a2, Operand(HEAP_NUMBER_TYPE));
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(a1, a3);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(a0, v0);
      __ Pop(a1, a3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(v0, a1, a0, a2, t0, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a0);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(a0);
  }
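  // Ret(USE_DELAY_SLOT) emits the return jump and places the store below in
  // its branch delay slot, so the JSValue's value field is written while the
  // return executes.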
  __ Ret(USE_DELAY_SLOT);
  __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset));  // In delay slot
}


// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  //    receiver).
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(a0, a0, Operand(1));
    __ Lsa(sp, sp, a0, kPointerSizeLog2);
    __ lw(a0, MemOperand(sp));
    __ Drop(2);
  }

  // 2a. At least one argument, return a0 if it's a string, otherwise
  //     dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(a0, &to_string);
    __ GetObjectType(a0, a1, a1);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ Subu(a1, a1, Operand(FIRST_NONSTRING_TYPE));
    __ Branch(&symbol_descriptive_string, eq, a1, Operand(zero_reg));
    __ Branch(&to_string, gt, a1, Operand(zero_reg));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(v0, Heap::kempty_stringRootIndex);
    __ DropAndRet(1);
  }

  // 3a. Convert a0 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in a0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(a0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}


// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0 and get rid of the rest (including the
  //    receiver).
  {
    Label no_arguments, done;
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(a0, a0, Operand(1));
    __ Lsa(sp, sp, a0, kPointerSizeLog2);
    __ lw(a0, MemOperand(sp));
    __ Drop(2);
    __ jmp(&done);
    __ bind(&no_arguments);
    __ LoadRoot(a0, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure a0 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(a0, &convert);
    __ GetObjectType(a0, a2, a2);
    __ And(t0, a2, Operand(kIsNotStringMask));
    __ Branch(&done_convert, eq, t0, Operand(zero_reg));
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(a1, a3);
      __ CallStub(&stub);
      __ Move(a0, v0);
      __ Pop(a1, a3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(v0, a1, a0, a2, t0, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a0);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(a0);
  }
  __ Ret(USE_DELAY_SLOT);
  __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset));  // In delay slot
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (preserved for callee)
  //  -- a1 : target function (preserved for callee)
  //  -- a3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(a0);
    __ Push(a0, a1, a3, a1);

    __ CallRuntime(function_id, 1);

    // Restore target function and new target.
    __ Pop(a0, a1, a3);
    __ SmiUntag(a0);
  }

  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(t0));

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a2     : allocation site or undefined
  //  -- a3     : new target
  //  -- cp     : context
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(a2, t0);
    __ SmiTag(a0);
    __ Push(cp, a2, a0);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(a1, a3);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(t4, v0);
      __ Pop(a1, a3);

      // ----------- S t a t e -------------
      //  -- a1: constructor function
      //  -- a3: new target
      //  -- t4: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ lw(a0, MemOperand(sp));
    }

    __ SmiUntag(a0);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(t4, t4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: new target
    // t4: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ SmiTag(t4, a0);
    __ jmp(&entry);
    __ bind(&loop);
    __ Lsa(t0, a2, t4, kPointerSizeLog2 - kSmiTagSize);
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Addu(t4, t4, Operand(-2));
    __ Branch(&loop, greater_equal, t4, Operand(zero_reg));
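    // t4 holds a smi-tagged copy of argc (that is, argc << 1 on this 32-bit
    // port), so subtracting 2 per iteration steps one argument at a time,
    // and the Lsa above shifts by (kPointerSizeLog2 - kSmiTagSize) to turn
    // the smi-tagged index into a byte offset.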

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    // a3: new target
    ParameterCount actual(a0);
    __ InvokeFunction(a1, a3, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ lw(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(v0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ GetObjectType(v0, a1, a3);
      __ Branch(&exit, greater_equal, a3, Operand(FIRST_JS_RECEIVER_TYPE));

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ lw(v0, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ lw(a1, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ lw(a1, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(v0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ Lsa(sp, sp, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, kPointerSize);
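  // a1 still holds the smi-tagged argument count, so shifting it by
  // (kPointerSizeLog2 - 1) converts it directly to a byte offset; the extra
  // kPointerSize added afterwards drops the receiver slot as well.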
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  }
  __ Ret();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}


void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(a1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}


enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers a2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
  // Make a2 the space we have left. The stack might already be overflowed
  // here which will cause a2 to become negative.
  __ Subu(a2, sp, a2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ sll(t3, argc, kPointerSizeLog2);
  }
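  // Either way t3 ends up holding argc * kPointerSize: a smi-tagged argc
  // already carries a factor of 2, hence the smaller shift in the first
  // branch.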
  // Signed comparison.
  __ Branch(&okay, gt, a2, Operand(t3));

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: new.target
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ li(cp, Operand(context_address));
    __ lw(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Check if we have enough stack space to push all arguments.
    // Clobbers a2.
    Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt);

    // Remember new.target.
    __ mov(t1, a0);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ Lsa(t2, s0, a3, kPointerSizeLog2);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));

    // Setup new.target and argc.
    __ mov(a0, a3);
    __ mov(a3, t1);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }

  __ Jump(ra);
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : the value to pass to the generator
  //  -- a1 : the JSGeneratorObject to resume
  //  -- a2 : the resume mode (tagged)
  //  -- ra : return address
  // -----------------------------------
  __ AssertGeneratorObject(a1);

  // Store input value into generator object.
  __ sw(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3,
                      kRAHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  __ li(t1, Operand(last_step_action));
  __ lb(t1, MemOperand(t1));
  __ Branch(&prepare_step_in_if_stepping, ge, t1, Operand(StepIn));

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ li(t1, Operand(debug_suspended_generator));
  __ lw(t1, MemOperand(t1));
  __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(t1));
  __ bind(&stepping_prepared);

  // Push receiver.
  __ lw(t1, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ Push(t1);

  // ----------- S t a t e -------------
  //  -- a1    : the JSGeneratorObject to resume
  //  -- a2    : the resume mode (tagged)
  //  -- t0    : generator function
  //  -- cp    : generator context
  //  -- ra    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label done_loop, loop;
    __ bind(&loop);
    __ Subu(a3, a3, Operand(Smi::FromInt(1)));
    __ Branch(&done_loop, lt, a3, Operand(zero_reg));
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ Branch(&loop);
    __ bind(&done_loop);
  }
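  // The formal parameter count in a3 is smi-tagged, which is harmless here:
  // subtracting Smi::FromInt(1) and comparing against zero still walks
  // exactly one iteration (one pushed hole) per formal parameter.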

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset));
  __ GetObjectType(a3, a3, a3);
  __ Branch(&old_generator, ne, a3, Operand(BYTECODE_ARRAY_TYPE));

  // New-style (ignition/turbofan) generator object.
  {
    __ lw(a0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a0,
          FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
    __ SmiUntag(a0);
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(a3, a1);
    __ Move(a1, t0);
    __ lw(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
    __ Jump(a2);
  }

  // Old-style (full-codegen) generator object
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(ra, fp);
    __ Move(fp, sp);
    __ Push(cp, t0);

    // Restore the operand stack.
    __ lw(a0, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
    __ lw(a3, FieldMemOperand(a0, FixedArray::kLengthOffset));
    __ Addu(a0, a0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ Lsa(a3, a0, a3, kPointerSizeLog2 - 1);
    {
      Label done_loop, loop;
      __ bind(&loop);
      __ Branch(&done_loop, eq, a0, Operand(a3));
      __ lw(t1, MemOperand(a0));
      __ Push(t1);
      __ Branch(USE_DELAY_SLOT, &loop);
      __ addiu(a0, a0, kPointerSize);  // In delay slot.
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex);
    __ sw(t1, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));

    // Resume the generator function at the continuation.
    __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset));
    __ Addu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(a2);
    __ Addu(a3, a3, Operand(a2));
    __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ Move(v0, a1);  // Continuation expects generator object in v0.
    __ Jump(a3);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a2, t0);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(a1, a2);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a2);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(a1, a2);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ lw(args_count,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lw(args_count,
        FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments.
  __ Addu(sp, sp, args_count);
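  // The parameter size loaded above is expressed in bytes and already
  // includes the receiver slot, which is why it can be added to sp directly.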
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o a1: the JS function object being called.
//   o a3: the new target
//   o cp: our context
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(a1);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ lw(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  Label load_debug_bytecode_array, bytecode_array_loaded;
  Register debug_info = kInterpreterBytecodeArrayRegister;
  DCHECK(!debug_info.is(a0));
  __ lw(debug_info, FieldMemOperand(a0, SharedFunctionInfo::kDebugInfoOffset));
  __ Branch(&load_debug_bytecode_array, ne, debug_info,
            Operand(DebugInfo::uninitialized()));
  __ lw(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  // Check function data field is actually a BytecodeArray object.
  Label bytecode_array_not_present;
  __ JumpIfRoot(kInterpreterBytecodeArrayRegister,
                Heap::kUndefinedValueRootIndex, &bytecode_array_not_present);
  if (FLAG_debug_code) {
    __ SmiTst(kInterpreterBytecodeArrayRegister, t0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, t0, t0);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Load initial bytecode offset.
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(t0, kInterpreterBytecodeOffsetRegister);
  __ Push(a3, kInterpreterBytecodeArrayRegister, t0);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ lw(t0, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));
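    // The frame size is in bytes, so the loop below pushes
    // frame_size / kPointerSize "undefined" entries to clear the
    // interpreter's register file.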

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ Subu(t1, sp, Operand(t0));
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    __ Branch(&ok, hs, t1, Operand(a2));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
    __ Branch(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(t1);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ Subu(t0, t0, Operand(kPointerSize));
    __ Branch(&loop_header, ge, t0, Operand(zero_reg));
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ li(kInterpreterDispatchTableRegister,
        Operand(ExternalReference::interpreter_dispatch_table_address(
            masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ Addu(a0, kInterpreterBytecodeArrayRegister,
          kInterpreterBytecodeOffsetRegister);
  __ lbu(a0, MemOperand(a0));
  __ Lsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2);
  __ lw(at, MemOperand(at));
  __ Call(at);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
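  // The pc offset just recorded identifies this return point; the
  // InterpreterEnterBytecodeDispatch builtin below reads it back to
  // reconstruct ra when it re-enters the dispatch loop.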

  // The return value is in v0.
  LeaveInterpreterFrame(masm, t0);
  __ Jump(ra);

  // Load debug copy of the bytecode array.
  __ bind(&load_debug_bytecode_array);
  __ lw(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ Branch(&bytecode_array_loaded);

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ bind(&bytecode_array_not_present);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kCodeOffset));
  __ Addu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ sw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(a1, t0, t1);
  __ Jump(t0);
}

void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
  // Save the function and context for call to CompileBaseline.
  __ lw(a1, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ lw(kContextRegister,
        MemOperand(fp, StandardFrameConstants::kContextOffset));

  // Leave the frame before recompiling for baseline so that we don't count as
  // an activation on the stack.
  LeaveInterpreterFrame(masm, t0);

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Push return value.
    __ push(v0);

    // Push function as argument and compile for baseline.
    __ push(a1);
    __ CallRuntime(Runtime::kCompileBaseline);

    // Restore return value.
    __ pop(v0);
  }
  __ Jump(ra);
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------

  // Find the address of the last argument.
  __ Addu(a3, a0, Operand(1));  // Add one for receiver.
  __ sll(a3, a3, kPointerSizeLog2);
  __ Subu(a3, a2, Operand(a3));
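  // a3 now sits (argc + 1) slots below a2; the loop below walks a2 down to
  // a3 (exclusive), pushing argc + 1 values in total: the arguments plus the
  // receiver, which is why one extra slot was added above.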

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ lw(t0, MemOperand(a2));
  __ Addu(a2, a2, Operand(-kPointerSize));
  __ push(t0);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(a3));

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (not including receiver)
  //  -- a3 : new target
  //  -- a1 : constructor to call
  //  -- a2 : address of the first argument
  // -----------------------------------

  // Find the address of the last argument.
  __ sll(t0, a0, kPointerSizeLog2);
  __ Subu(t0, a2, Operand(t0));

  // Push a slot for the receiver.
  __ push(zero_reg);

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ lw(t1, MemOperand(a2));
  __ Addu(a2, a2, Operand(-kPointerSize));
  __ push(t1);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(t0));

  // Call the constructor with a0, a1, and a3 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));
  __ Addu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() +
                          Code::kHeaderSize - kHeapObjectTag));
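  // ra now points at the fixed return position inside
  // InterpreterEntryTrampoline, so when a bytecode handler returns it
  // resumes there just as if it had been called from the trampoline.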
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001199
Ben Murdochc5610432016-08-08 18:44:38 +01001200 // Initialize the dispatch table register.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001201 __ li(kInterpreterDispatchTableRegister,
1202 Operand(ExternalReference::interpreter_dispatch_table_address(
1203 masm->isolate())));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001204
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001205 // Get the bytecode array pointer from the frame.
Ben Murdochc5610432016-08-08 18:44:38 +01001206 __ lw(kInterpreterBytecodeArrayRegister,
1207 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001208
1209 if (FLAG_debug_code) {
1210 // Check function data field is actually a BytecodeArray object.
1211 __ SmiTst(kInterpreterBytecodeArrayRegister, at);
1212 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at,
1213 Operand(zero_reg));
1214 __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
1215 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1,
1216 Operand(BYTECODE_ARRAY_TYPE));
1217 }
1218
1219 // Get the target bytecode offset from the frame.
1220 __ lw(kInterpreterBytecodeOffsetRegister,
Ben Murdochc5610432016-08-08 18:44:38 +01001221 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001222 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1223
1224 // Dispatch to the target bytecode.
1225 __ Addu(a1, kInterpreterBytecodeArrayRegister,
1226 kInterpreterBytecodeOffsetRegister);
1227 __ lbu(a1, MemOperand(a1));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001228 __ Lsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001229 __ lw(a1, MemOperand(a1));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001230 __ Jump(a1);
1231}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (preserved for callee)
  //  -- a3 : new target (preserved for callee)
  //  -- a1 : target function (preserved for callee)
  // -----------------------------------
  // First look up code; maybe we don't need to compile!
  Label gotta_call_runtime, gotta_call_runtime_no_stack;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register argument_count = a0;
  Register closure = a1;
  Register new_target = a3;
  __ push(argument_count);
  __ push(new_target);
  __ push(closure);

  Register map = a0;
  Register index = a2;
  __ lw(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ lw(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ lw(index, FieldMemOperand(map, FixedArray::kLengthOffset));
  __ Branch(&gotta_call_runtime, lt, index, Operand(Smi::FromInt(2)));

  // Find literals.
  // a3       : native context
  // a2       : length / index
  // a0       : optimized code map
  // stack[0] : new target
  // stack[4] : closure
  Register native_context = a3;
  __ lw(native_context, NativeContextMemOperand());

  __ bind(&loop_top);
  Register temp = a1;
  Register array_pointer = t1;

  // Does the native context match?
  __ sll(at, index, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(array_pointer, map, Operand(at));
  __ lw(temp, FieldMemOperand(array_pointer,
                              SharedFunctionInfo::kOffsetToPreviousContext));
  __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ Branch(&loop_bottom, ne, temp, Operand(native_context));
  // OSR id set to none?
  __ lw(temp, FieldMemOperand(array_pointer,
                              SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ Branch(&loop_bottom, ne, temp, Operand(Smi::FromInt(bailout_id)));

  // Literals available?
  Label got_literals, maybe_cleared_weakcell;
  __ lw(temp, FieldMemOperand(array_pointer,
                              SharedFunctionInfo::kOffsetToPreviousLiterals));
  // temp contains either a WeakCell pointing to the literals array or the
  // literals array directly.
  STATIC_ASSERT(WeakCell::kValueOffset == FixedArray::kLengthOffset);
  __ lw(t0, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(t0, &maybe_cleared_weakcell);
  // t0 is a pointer, therefore temp is a WeakCell pointing to a literals
  // array.
  __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ jmp(&got_literals);

  // t0 is a smi. If it's 0, then we are looking at a cleared WeakCell
  // around the literals array, and we should visit the runtime. If it's > 0,
  // then temp already contains the literals array.
  __ bind(&maybe_cleared_weakcell);
  __ Branch(&gotta_call_runtime, eq, t0, Operand(Smi::FromInt(0)));

  // Save the literals in the closure.
  __ bind(&got_literals);
  __ lw(t0, MemOperand(sp, 0));
  __ sw(temp, FieldMemOperand(t0, JSFunction::kLiteralsOffset));
  __ push(index);
  __ RecordWriteField(t0, JSFunction::kLiteralsOffset, temp, index,
                      kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ pop(index);

  // Code available?
  Register entry = t0;
  __ lw(entry,
        FieldMemOperand(array_pointer,
                        SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &maybe_call_runtime);

  // Found literals and code. Get them into the closure and return.
  __ pop(closure);
  // Store code entry in the closure.
  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));

  Label install_optimized_code_and_tailcall;
  __ bind(&install_optimized_code_and_tailcall);
  __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, t1);

  // Link the closure into the optimized function list.
  // t0 : code entry
  // a3 : native context
  // a1 : closure
  __ lw(t1,
        ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ sw(t1, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, t1, a0,
                      kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ sw(closure,
        ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  // Save closure before the write barrier.
  __ mov(t1, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0,
                            kRAHasNotBeenSaved, kDontSaveFPRegs);
  __ mov(closure, t1);
  __ pop(new_target);
  __ pop(argument_count);
  __ Jump(entry);

  __ bind(&loop_bottom);
  __ Subu(index, index,
          Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
  __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1)));

  // We found neither literals nor code.
  __ jmp(&gotta_call_runtime);

  __ bind(&maybe_call_runtime);
  __ pop(closure);

  // Last possibility. Check the context-free optimized code map entry.
  __ lw(entry, FieldMemOperand(map, FixedArray::kHeaderSize +
                                        SharedFunctionInfo::kSharedCodeIndex));
  __ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Store code entry in the closure.
  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(&install_optimized_code_and_tailcall);

  __ bind(&try_shared);
  __ pop(new_target);
  __ pop(argument_count);
  // Is the full code valid?
  __ lw(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ lw(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ lw(t1, FieldMemOperand(entry, Code::kFlagsOffset));
  __ And(t1, t1, Operand(Code::KindField::kMask));
  __ srl(t1, t1, Code::KindField::kShift);
  __ Branch(&gotta_call_runtime_no_stack, eq, t1, Operand(Code::BUILTIN));
  // Yes, install the full code.
  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, t1);
  __ Jump(entry);

  __ bind(&gotta_call_runtime);
  __ pop(closure);
  __ pop(new_target);
  __ pop(argument_count);
  __ bind(&gotta_call_runtime_no_stack);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
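
// Note: an informal sketch (illustration only, not the authoritative layout)
// of the optimized code map scanned above, inferred from the
// SharedFunctionInfo::kOffsetToPrevious* offsets used in the loop:
//
//   // entry i (kEntryLength slots, scanned from the end of the array):
//   //   +0 : WeakCell(native context)
//   //   +1 : WeakCell(optimized code)
//   //   +2 : WeakCell(literals) or the literals array itself
//   //   +3 : Smi(OSR AST id)
//
// The loop walks entries backwards until it finds one whose context matches
// the current native context and whose OSR id is BailoutId::None().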

void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
          Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  //   a3 - new target
  RegList saved_regs =
      (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  __ Jump(a0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
          Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  //   a3 - new target
  RegList saved_regs =
      (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);

  // Perform prologue operations usually performed by the young code stub.
  __ PushStandardFrame(a1);

  // Jump to point after the code-age stub.
  __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
  __ Jump(a0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification; this is important for compiled
    // stubs that tail call the runtime on deopts, passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ Addu(sp, sp, Operand(kPointerSize));  // Ignore state.
  __ Jump(ra);  // Jump to miss handler.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a0);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> t2.
  __ lw(t2, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(t2);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register, ne, t2,
            Operand(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS)));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot; Addu will emit one instruction.
  __ Addu(sp, sp, Operand(1 * kPointerSize));  // Remove state.

  __ bind(&with_tos_register);
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code());
  __ lw(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, t2,
            Operand(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot; Addu will emit one instruction.
  __ Addu(sp, sp, Operand(2 * kPointerSize));  // Remove state.

  __ bind(&unknown_state);
  __ stop("no cases left");
}
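
// Note: an informal C sketch (illustration only) of the state handling
// above; `state` is the Smi pushed by the deoptimizer and `tos` the saved
// top-of-stack value at sp[4]:
//
//   if (state == NO_REGISTERS) { sp += 1 * kPointerSize; return; }
//   if (state == TOS_REGISTER) { v0 = tos; sp += 2 * kPointerSize; return; }
//   abort("no cases left");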


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


// Clobbers {t2, t3, t4, t5}.
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Label* receiver_check_failed) {
  Register signature = t2;
  Register map = t3;
  Register constructor = t4;
  Register scratch = t5;

  // If there is no signature, return the holder.
  __ lw(signature, FieldMemOperand(function_template_info,
                                   FunctionTemplateInfo::kSignatureOffset));
  Label receiver_check_passed;
  __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
                &receiver_check_passed);

  // Walk the prototype chain.
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, scratch, scratch);
  Label next_prototype;
  __ Branch(&next_prototype, ne, scratch, Operand(JS_FUNCTION_TYPE));
  Register type = constructor;
  __ lw(type,
        FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ lw(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ Branch(&receiver_check_passed, eq, signature, Operand(type),
            USE_DELAY_SLOT);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ GetObjectType(type, scratch, scratch);
  __ Branch(&next_prototype, ne, scratch, Operand(FUNCTION_TEMPLATE_INFO_TYPE));

  // Otherwise load the parent function template and iterate.
  __ lw(type,
        FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ Branch(&function_template_loop);

  // Load the next prototype and iterate.
  __ bind(&next_prototype);
  __ lw(scratch, FieldMemOperand(map, Map::kBitField3Offset));
  __ DecodeField<Map::HasHiddenPrototype>(scratch);
  __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg));
  __ lw(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));

  __ Branch(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}
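
// Note: a rough pseudocode rendering (illustration only) of the check above:
//
//   sig = info->signature;
//   if (sig == undefined) return pass;
//   for (;;) {
//     for (t = receiver->map->constructor->shared->function_data;
//          t is FunctionTemplateInfo; t = t->parent_template)
//       if (t == sig) return pass;
//     if (!receiver->map->has_hidden_prototype) return fail;
//     receiver = receiver->map->prototype;
//   }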


void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : callee
  //  -- ra                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ lw(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(t1, FieldMemOperand(t1, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ Lsa(t8, sp, a0, kPointerSizeLog2);
  __ lw(t0, MemOperand(t8));
  CompatibleReceiverCheck(masm, t0, t1, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ lw(t2, FieldMemOperand(t1, FunctionTemplateInfo::kCallCodeOffset));
  __ lw(t2, FieldMemOperand(t2, CallHandlerInfo::kFastHandlerOffset));
  __ Addu(t2, t2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(t2);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver).
  __ Addu(t8, t8, Operand(kPointerSize));
  __ addu(sp, t8, zero_reg);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Look up the function in the JavaScript frame.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ addu(v0, v0, a1);
  __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}
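
// Note: the address arithmetic above, as a one-line C sketch (illustrative):
//
//   entry = code_obj + Code::kHeaderSize - kHeapObjectTag + osr_offset;
//
// ra is set to `entry` so that the final Ret "returns" into the optimized
// code at the OSR entry point.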


// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- a0    : number of arguments
  //  -- a1    : function
  //  -- cp    : context
  //  -- sp[0] : receiver
  // -----------------------------------

  // 1. Pop receiver into a0 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(a0);
    __ JumpIfSmi(a0, &receiver_not_date);
    __ GetObjectType(a0, t0, t0);
    __ Branch(&receiver_not_date, ne, t0, Operand(JS_DATE_TYPE));
  }

  // 2. Load the specified date field, falling back to the runtime as
  //    necessary.
  if (field_index == JSDate::kDateValue) {
    __ Ret(USE_DELAY_SLOT);
    __ lw(v0, FieldMemOperand(a0, JSDate::kValueOffset));  // In delay slot.
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      Label stamp_mismatch;
      __ li(a1, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
      __ lw(a1, MemOperand(a1));
      __ lw(t0, FieldMemOperand(a0, JSDate::kCacheStampOffset));
      __ Branch(&stamp_mismatch, ne, t0, Operand(a1));
      __ Ret(USE_DELAY_SLOT);
      __ lw(v0, FieldMemOperand(
                    a0, JSDate::kValueOffset +
                            field_index * kPointerSize));  // In delay slot.
      __ bind(&stamp_mismatch);
    }
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, t0);
    __ li(a1, Operand(Smi::FromInt(field_index)));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(a0, ra, fp);
    __ Move(fp, sp);
    __ Push(cp, a1);
    __ Push(Smi::FromInt(0));
    __ CallRuntime(Runtime::kThrowNotDateError);
  }
}
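
// Note: the cache check above, sketched as C (illustration only; field 0 is
// the primitive date value, and cached fields follow it in the JSDate
// object):
//
//   if (date->cache_stamp == isolate->date_cache_stamp)
//     return date->fields[field_index];          // fast path
//   return get_date_field(date, field_index);    // runtime refreshes caches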

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into a1, argArray into a0 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg;
    Register scratch = t0;
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ mov(a3, a2);
    // Lsa() cannot be used here, as scratch is needed again later.
    __ sll(scratch, a0, kPointerSizeLog2);
    __ Addu(a0, sp, Operand(scratch));
    __ lw(a1, MemOperand(a0));  // receiver
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a2, MemOperand(a0));  // thisArg
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a3, MemOperand(a0));  // argArray
    __ bind(&no_arg);
    __ Addu(sp, sp, Operand(scratch));
    __ sw(a2, MemOperand(sp));
    __ mov(a0, a3);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argArray
  //  -- a1    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(a1, &receiver_not_callable);
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsCallable));
  __ Branch(&receiver_not_callable, eq, t0, Operand(zero_reg));

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ mov(a0, zero_reg);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ sw(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
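
// Note: the first block of Generate_FunctionPrototypeApply above implements,
// informally (illustration only, argc in a0):
//
//   receiver = sp[argc * 4];
//   thisArg  = argc >= 1 ? sp[(argc - 1) * 4] : undefined;
//   argArray = argc >= 2 ? sp[(argc - 2) * 4] : undefined;
//   sp += argc * 4;  sp[0] = thisArg;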


// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  {
    Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ Addu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack.
  // a0: actual number of arguments
  __ Lsa(at, sp, a0, kPointerSizeLog2);
  __ lw(a1, MemOperand(at));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // a0: actual number of arguments
  // a1: function
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ Lsa(a2, sp, a0, kPointerSizeLog2);

    __ bind(&loop);
    __ lw(at, MemOperand(a2, -kPointerSize));
    __ sw(at, MemOperand(a2));
    __ Subu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Subu(a0, a0, Operand(1));
    __ Pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
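
// Note: an illustrative sketch (not part of the build) of the shift in step
// 3 above; argv[i] stands for sp[i * 4], so argv[argc] is the receiver slot
// and argv[0] the last argument:
//
//   for (int i = argc; i > 0; i--) argv[i] = argv[i - 1];
//   argc--;  pop();  // drop the duplicated last argument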


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label no_arg;
    Register scratch = t0;
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ mov(a2, a1);
    __ mov(a3, a1);
    __ sll(scratch, a0, kPointerSizeLog2);
    __ mov(a0, scratch);
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(zero_reg));
    __ Addu(a0, sp, Operand(a0));
    __ lw(a1, MemOperand(a0));  // target
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a2, MemOperand(a0));  // thisArgument
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a3, MemOperand(a0));  // argumentsList
    __ bind(&no_arg);
    __ Addu(sp, sp, Operand(scratch));
    __ sw(a2, MemOperand(sp));
    __ mov(a0, a3);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a1    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(a1, &target_not_callable);
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsCallable));
  __ Branch(&target_not_callable, eq, t0, Operand(zero_reg));

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ sw(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
  // new.target into a3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and leave undefined on
  // the stack as the receiver.
  {
    Label no_arg;
    Register scratch = t0;
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ mov(a2, a1);
    // Lsa() cannot be used here, as scratch is needed again later.
    __ sll(scratch, a0, kPointerSizeLog2);
    __ Addu(a0, sp, Operand(scratch));
    __ sw(a2, MemOperand(a0));  // receiver
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a1, MemOperand(a0));  // target
    __ mov(a3, a1);             // new.target defaults to target
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a2, MemOperand(a0));  // argumentsList
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a3, MemOperand(a0));  // new.target
    __ bind(&no_arg);
    __ Addu(sp, sp, Operand(scratch));
    __ mov(a0, a2);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a3    : new.target
  //  -- a1    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(a1, &target_not_constructor);
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsConstructor));
  __ Branch(&target_not_constructor, eq, t0, Operand(zero_reg));

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(a3, &new_target_not_constructor);
  __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsConstructor));
  __ Branch(&new_target_not_constructor, eq, t0, Operand(zero_reg));

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ sw(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ sw(a3, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- a0 : actual number of arguments
  //  -- a1 : function (passed through to callee)
  //  -- a2 : expected number of arguments
  //  -- a3 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
  // Make t1 the space we have left. The stack might already be overflowed
  // here which will cause t1 to become negative.
  __ subu(t1, sp, t1);
  // Check if the arguments will overflow the stack.
  __ sll(at, a2, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, t1, Operand(at));
}
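
// Note: the overflow test above, as an informal C sketch (illustrative):
//
//   intptr_t headroom = sp - real_stack_limit;  // may already be negative
//   if (headroom <= expected_args * kPointerSize) goto stack_overflow;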


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ sll(a0, a0, kSmiTagSize);
  __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
  __ Addu(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                             kPointerSize)));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  __ Lsa(sp, sp, a1, kPointerSizeLog2 - kSmiTagSize);
  // Adjust for the receiver.
  __ Addu(sp, sp, Operand(kPointerSize));
}
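
// Note: an informal picture of the adaptor frame laid out by
// EnterArgumentsAdaptorFrame above, assuming kFixedFrameSizeFromFp is
// 2 * kPointerSize (illustration only):
//
//   [fp + 4]  : saved ra
//   [fp + 0]  : saved caller fp
//   [fp - 4]  : frame marker (Smi ARGUMENTS_ADAPTOR)
//   [fp - 8]  : function (a1)
//   [fp - 12] : argc as Smi   <- read back by LeaveArgumentsAdaptorFrame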


// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a1    : target
  //  -- a3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(a0, &create_runtime);

    // Load the map of argumentsList into a2.
    __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));

    // Load native context into t0.
    __ lw(t0, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ lw(at, ContextMemOperand(t0, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ Branch(&create_arguments, eq, a2, Operand(at));
    __ lw(at, ContextMemOperand(t0, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ Branch(&create_arguments, eq, a2, Operand(at));

    // Check if argumentsList is a fast JSArray.
    __ lbu(v0, FieldMemOperand(a2, Map::kInstanceTypeOffset));
    __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE));

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(a1, a3, a0);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ mov(a0, v0);
      __ Pop(a1, a3);
      __ lw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
      __ SmiUntag(a2);
    }
    __ Branch(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ lw(a2, FieldMemOperand(a0, JSArgumentsObject::kLengthOffset));
    __ lw(t0, FieldMemOperand(a0, JSObject::kElementsOffset));
    __ lw(at, FieldMemOperand(t0, FixedArray::kLengthOffset));
    __ Branch(&create_runtime, ne, a2, Operand(at));
    __ SmiUntag(a2);
    __ mov(a0, t0);
    __ Branch(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ lbu(a2, FieldMemOperand(a2, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(a2);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS));
    __ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ lw(a2, FieldMemOperand(a0, JSArray::kLengthOffset));
    __ lw(a0, FieldMemOperand(a0, JSArray::kElementsOffset));
    __ SmiUntag(a2);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(t0, Heap::kRealStackLimitRootIndex);
    // Make t0 the space we have left. The stack might already be overflowed
    // here which will cause t0 to become negative.
    __ Subu(t0, sp, t0);
    // Check if the arguments will overflow the stack.
    __ sll(at, a2, kPointerSizeLog2);
    __ Branch(&done, gt, t0, Operand(at));  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- a1    : target
  //  -- a0    : args (a FixedArray built from argumentsList)
  //  -- a2    : len (number of elements to push from args)
  //  -- a3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    __ mov(t0, zero_reg);
    Label done, loop;
    __ bind(&loop);
    __ Branch(&done, eq, t0, Operand(a2));
    __ Lsa(at, a0, t0, kPointerSizeLog2);
    __ lw(at, FieldMemOperand(at, FixedArray::kHeaderSize));
    __ Push(at);
    __ Addu(t0, t0, Operand(1));
    __ Branch(&loop);
    __ bind(&done);
    __ Move(a0, t0);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    Label construct;
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(&construct, ne, a3, Operand(at));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
    __ bind(&construct);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
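
// Note: an informal sketch (illustration only) of the argument push loop in
// Generate_Apply above; `args` is the FixedArray and `len` its length:
//
//   for (int i = 0; i < len; i++) push(args->get(i));
//   a0 = len;  // argument count handed to Call/Construct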

namespace {

// Drops the top JavaScript frame and an arguments adaptor frame below it (if
// present), preserving all the arguments prepared for the current call.
// Does nothing if the debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ li(at, Operand(is_tail_call_elimination_enabled));
  __ lb(scratch1, MemOperand(at));
  __ Branch(&done, eq, scratch1, Operand(zero_reg));

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ lw(scratch3,
          MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ Branch(&no_interpreter_frame, ne, scratch3,
              Operand(Smi::FromInt(StackFrame::STUB)));
    __ lw(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ lw(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(scratch3,
        MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Branch(&no_arguments_adaptor, ne, scratch3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mov(fp, scratch2);
  __ lw(caller_args_count_reg,
        MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ Branch(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count.
  __ lw(scratch1,
        MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  __ lw(scratch1,
        FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(caller_args_count_reg,
        FieldMemOperand(scratch1,
                        SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(caller_args_count_reg);

  __ bind(&formal_parameter_count_loaded);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace
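
// Note: informally, PrepareForTailCall above rewrites the stack from
//
//   [caller frame][adaptor frame?][g's frame][f's args]
//
// to
//
//   [caller frame][f's args]
//
// so that the upcoming jump to f behaves like a call made directly from g's
// caller. The actual register and slot shuffling is performed by
// MacroAssembler::PrepareForTailCall.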

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(a1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset));
  __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ Branch(&class_constructor, ne, at, Operand(zero_reg));

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
  __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                         (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ Branch(&done_convert, ne, at, Operand(zero_reg));
  {
    // ----------- S t a t e -------------
    //  -- a0 : the number of arguments (not including the receiver)
    //  -- a1 : the function to call (checked to be a JSFunction)
    //  -- a2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(a3);
    } else {
      Label convert_to_object, convert_receiver;
      __ Lsa(at, sp, a0, kPointerSizeLog2);
      __ lw(a3, MemOperand(at));
      __ JumpIfSmi(a3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ GetObjectType(a3, t0, t0);
      __ Branch(&done_convert, hs, t0, Operand(FIRST_JS_RECEIVER_TYPE));
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(a3);
        }
        __ Branch(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the
        // frame in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
        __ Push(a0, a1);
        __ mov(a0, a3);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ mov(a3, v0);
        __ Pop(a0, a1);
        __ sra(a0, a0, kSmiTagSize);  // Un-tag.
      }
      __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ Lsa(at, sp, a0, kPointerSizeLog2);
    __ sw(a3, MemOperand(at));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  //  -- a2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  __ lw(a2,
        FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
  __ sra(a2, a2, kSmiTagSize);  // Un-tag.
  ParameterCount actual(a0);
  ParameterCount expected(a2);
  __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}


// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  // Patch the receiver to [[BoundThis]].
  {
    __ lw(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
    __ Lsa(t0, sp, a0, kPointerSizeLog2);
    __ sw(at, MemOperand(t0));
  }

  // Load [[BoundArguments]] into a2 and length of that into t0.
  __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(t0);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- t0 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ sll(t1, t0, kPointerSizeLog2);
    __ Subu(sp, sp, Operand(t1));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Addu(sp, sp, Operand(t1));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(t1, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, gt, t1, Operand(a0));
    __ Lsa(t2, sp, t0, kPointerSizeLog2);
    __ lw(at, MemOperand(t2));
    __ Lsa(t2, sp, t1, kPointerSizeLog2);
    __ sw(at, MemOperand(t2));
    __ Addu(t0, t0, Operand(1));
    __ Addu(t1, t1, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(t0);
    __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Subu(t0, t0, Operand(1));
    __ Branch(&done_loop, lt, t0, Operand(zero_reg));
    __ Lsa(t1, a2, t0, kPointerSizeLog2);
    __ lw(at, MemOperand(t1));
    __ Lsa(t1, sp, a0, kPointerSizeLog2);
    __ sw(at, MemOperand(t1));
    __ Addu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                      masm->isolate())));
  __ lw(at, MemOperand(at));
  __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
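
// Note: an informal picture (illustration only) of the stack transformation
// performed by Generate_CallBoundFunctionImpl above, for a bound function
// with k bound arguments and an incoming call with n arguments:
//
//   before: receiver, arg_1 .. arg_n
//   after:  receiver, bound_1 .. bound_k, arg_1 .. arg_n
//
// with a0 updated from n to n + k before jumping to the Call builtin.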
2494
2495
2496// static
Ben Murdoch097c5b22016-05-18 11:27:45 +01002497void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2498 TailCallMode tail_call_mode) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002499 // ----------- S t a t e -------------
2500 // -- a0 : the number of arguments (not including the receiver)
2501 // -- a1 : the target to call (can be any Object).
2502 // -----------------------------------
2503
2504 Label non_callable, non_function, non_smi;
2505 __ JumpIfSmi(a1, &non_callable);
2506 __ bind(&non_smi);
2507 __ GetObjectType(a1, t1, t2);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002508 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002509 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002510 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002511 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002512
2513 // Check if target has a [[Call]] internal method.
2514 __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
2515 __ And(t1, t1, Operand(1 << Map::kIsCallable));
2516 __ Branch(&non_callable, eq, t1, Operand(zero_reg));
2517
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002518 __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE));
2519
Ben Murdoch097c5b22016-05-18 11:27:45 +01002520 // 0. Prepare for tail call if necessary.
2521 if (tail_call_mode == TailCallMode::kAllow) {
2522 PrepareForTailCall(masm, a0, t0, t1, t2);
2523 }
2524
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002525 // 1. Runtime fallback for Proxy [[Call]].
2526 __ Push(a1);
2527 // Increase the arguments size to include the pushed function and the
2528 // existing receiver on the stack.
2529 __ Addu(a0, a0, 2);
2530 // Tail-call to the runtime.
2531 __ JumpToExternalReference(
2532 ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2533
2534 // 2. Call to something else, which might have a [[Call]] internal method (if
2535 // not we raise an exception).
2536 __ bind(&non_function);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002537 // Overwrite the original receiver with the (original) target.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002538 __ Lsa(at, sp, a0, kPointerSizeLog2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002539 __ sw(a1, MemOperand(at));
2540 // Let the "call_as_function_delegate" take care of the rest.
2541 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
2542 __ Jump(masm->isolate()->builtins()->CallFunction(
Ben Murdoch097c5b22016-05-18 11:27:45 +01002543 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002544 RelocInfo::CODE_TARGET);
2545
2546 // 3. Call to something that is not callable.
2547 __ bind(&non_callable);
2548 {
2549 FrameScope scope(masm, StackFrame::INTERNAL);
2550 __ Push(a1);
2551 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2552 }
2553}
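
// The dispatch above, in JavaScript terms (an illustrative sketch):
//
//   f(...)    f an ordinary function     -> CallFunction builtin
//   bf(...)   bf created by f.bind(x)    -> CallBoundFunction builtin
//   p(...)    p a callable Proxy         -> Runtime::kJSProxyCall
//   o(...)    o an exotic callable       -> CALL_AS_FUNCTION_DELEGATE
//   42(...)   not callable at all        -> Runtime::kThrowCalledNonCallable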


// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(a1);

  // The calling convention for function-specific ConstructStubs requires
  // a2 to contain either an AllocationSite or undefined.
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
  __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  // Load [[BoundArguments]] into a2 and its length into t0.
  __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(t0);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- a3 : the new target (checked to be a constructor)
  //  -- t0 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ sll(t1, t0, kPointerSizeLog2);
    __ Subu(sp, sp, Operand(t1));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Addu(sp, sp, Operand(t1));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(t1, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, ge, t1, Operand(a0));
    __ Lsa(t2, sp, t0, kPointerSizeLog2);
    __ lw(at, MemOperand(t2));
    __ Lsa(t2, sp, t1, kPointerSizeLog2);
    __ sw(at, MemOperand(t2));
    __ Addu(t0, t0, Operand(1));
    __ Addu(t1, t1, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(t0);
    __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Subu(t0, t0, Operand(1));
    __ Branch(&done_loop, lt, t0, Operand(zero_reg));
    __ Lsa(t1, a2, t0, kPointerSizeLog2);
    __ lw(at, MemOperand(t1));
    __ Lsa(t1, sp, a0, kPointerSizeLog2);
    __ sw(at, MemOperand(t1));
    __ Addu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label skip_load;
    __ Branch(&skip_load, ne, a1, Operand(a3));
    __ lw(a3,
          FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&skip_load);
  }
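
  // In JavaScript terms (an illustrative sketch): for B = f.bind(x),
  // `new B()` arrives here with target == new.target == B, and new.target
  // is forwarded as f so that `new.target` observed inside f is the bound
  // target rather than the wrapper. For Reflect.construct(B, args, C) the
  // original new.target C is left untouched.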

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ lw(at, MemOperand(at));
  __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSProxy)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(a1, a3);
  // Include the pushed new_target, constructor and the receiver.
  __ Addu(a0, a0, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}


// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (can be any Object)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(a1, &non_constructor);

  // Dispatch based on instance type.
  __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));

  // Check if target has a [[Construct]] internal method.
  __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t3, t3, Operand(1 << Map::kIsConstructor));
  __ Branch(&non_constructor, eq, t3, Operand(zero_reg));

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Only dispatch to proxies after checking whether they are constructors.
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq, t2, Operand(JS_PROXY_TYPE));

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ Lsa(at, sp, a0, kPointerSizeLog2);
    __ sw(a1, MemOperand(at));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : requested object size (untagged)
  //  -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Push(a0);
  __ Move(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : requested object size (untagged)
  //  -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(a0, a1);
  __ Move(cp, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
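
// A note on the two builtins above: on 32-bit MIPS a Smi is the value
// shifted left by one bit, so SmiTag turns a requested size of, say, 64
// bytes into the word 128, which the runtime untags again on entry.
// Setting cp to Smi zero appears to mark the call as having no JavaScript
// context (a reading of the code, not a documented contract).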

// static
void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
  // The StringToNumber stub takes one argument in a0.
  __ AssertString(a0);

  // Check if string has a cached array index.
  Label runtime;
  __ lw(a2, FieldMemOperand(a0, String::kHashFieldOffset));
  __ And(at, a2, Operand(String::kContainsCachedArrayIndexMask));
  __ Branch(&runtime, ne, at, Operand(zero_reg));
  __ IndexFromHash(a2, v0);
  __ Ret();
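
  // Fast path note: strings that are valid array indices (e.g. "42") cache
  // the numeric value in their hash field, so IndexFromHash can produce the
  // Smi result without calling into the runtime.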

  __ bind(&runtime);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ Push(a0);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kStringToNumber);
  }
  __ Ret();
}

// static
void Builtins::Generate_ToNumber(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in a0.
  Label not_smi;
  __ JumpIfNotSmi(a0, &not_smi);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
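  // The mov above executes in the branch delay slot of the Ret: on MIPS the
  // instruction after a branch always runs, so the Smi is moved into the
  // return register v0 "for free".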
  __ bind(&not_smi);

  Label not_heap_number;
  __ GetObjectType(a0, a1, a1);
  // a0: receiver
  // a1: receiver instance type
  __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
  __ bind(&not_heap_number);

  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
  // The NonNumberToNumber stub takes one argument in a0.
  __ AssertNotNumber(a0);

  Label not_string;
  __ GetObjectType(a0, a1, a1);
  // a0: receiver
  // a1: receiver instance type
  __ Branch(&not_string, hs, a1, Operand(FIRST_NONSTRING_TYPE));
  __ Jump(masm->isolate()->builtins()->StringToNumber(),
          RelocInfo::CODE_TARGET);
  __ bind(&not_string);

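  // Oddballs (undefined, null, true, false) store their ToNumber result in
  // the object itself -- e.g. ToNumber(true) is 1 and ToNumber(undefined)
  // is NaN -- so the result can simply be loaded from kToNumberOffset.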
  Label not_oddball;
  __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
  __ Ret(USE_DELAY_SLOT);
  __ lw(v0, FieldMemOperand(a0, Oddball::kToNumberOffset));  // In delay slot.
  __ bind(&not_oddball);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ Push(a0);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kToNumber);
  }
  __ Ret();
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  //  -- a3: new target (passed through to callee)
  // -----------------------------------
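
  // How the adaptor behaves, as an illustrative sketch (not generated
  // code): the callee always sees exactly a2 arguments. Extra actual
  // arguments are not copied into the new frame; missing ones are filled
  // with undefined.
  //
  //   function f(x, y, z) {}  // a2 == 3
  //   f(1, 2, 3, 4);          // a0 == 4: f sees 1, 2, 3
  //   f(1);                   // a0 == 1: f sees 1, undefined, undefined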

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Branch(&dont_adapt_arguments, eq,
            a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into t1.
    __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ sll(t1, a2, kPointerSizeLog2);
    __ subu(t1, a0, t1);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // t1: copy end address

    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ push(t0);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t1));
    __ addiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }
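
  // The copy loop above as an illustrative C sketch: starting at the
  // receiver slot of the caller frame it pushes a2 + 1 words (receiver plus
  // the first a2 arguments), walking from higher to lower addresses.
  //
  //   word* p = copy_start;
  //   do { push(*p); } while (p-- != copy_end);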

  {  // Too few parameters: actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into t3.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Addu(t3, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // t3: copy end address
    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Subu(sp, sp, kPointerSize);
    __ Subu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
    __ sw(t0, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ sll(t2, a2, kPointerSizeLog2);
    __ Subu(t1, fp, Operand(t2));
    // Adjust for frame.
    __ Subu(t1, t1, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ Subu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(t1));
    __ sw(t0, MemOperand(sp));
  }
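
  // And the too_few path as an illustrative C sketch: copy the a0 actual
  // arguments plus the receiver, then store undefined into the remaining
  // expected-argument slots until sp reaches the precomputed boundary t1.
  //
  //   do { *--sp = *p--; } while (p != copy_end);        // a0 + 1 words
  //   do { *--sp = undefined; } while (sp != fill_end);  // pad to a2 words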

  // Call the entry point.
  __ bind(&invoke);
  __ mov(a0, a2);
  // a0 : expected number of arguments
  // a1 : function (passed through to callee)
  // a3 : new target (passed through to callee)
  __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Call(t0);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Jump(t0);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ break_(0xCC);
  }
}


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS