// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.



#include "src/v8.h"

#if V8_TARGET_ARCH_MIPS

#include "src/codegen.h"
#include "src/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"
#include "src/runtime/runtime.h"


namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
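// ACCESS_MASM expands to 'masm->' (plus instruction tracking in some debug
// builds), so each '__ foo(...)' statement below emits MIPS code through the
// MacroAssembler.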


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(a1);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects a0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Addu(a0, a0, num_extra_args + 1);
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.

  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(
                       Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.

  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);

  Register function = a1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
    __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
  }

  // Load the first argument into a0 and get rid of the rest.
  Label no_arguments;
  __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
  // First argument = sp[(argc - 1) * 4].
  __ Subu(a0, a0, Operand(1));
  __ sll(a0, a0, kPointerSizeLog2);
  __ Addu(sp, a0, sp);
  __ lw(a0, MemOperand(sp));
  // sp now points to args[0]; drop args[0] and the receiver.
  __ Drop(2);

  Register argument = a2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(a0,        // Input.
                             argument,  // Result.
                             a3,        // Scratch.
                             t0,        // Scratch.
                             t1,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- a2     : argument converted to string
  //  -- a1     : constructor function
  //  -- ra     : return address
  // -----------------------------------

  Label gc_required;
  __ Allocate(JSValue::kSize,
              v0,  // Result.
              a3,  // Scratch.
              t0,  // Scratch.
              &gc_required,
              TAG_OBJECT);

  // Initialize the String object.
  Register map = a3;
  __ LoadGlobalFunctionInitialMap(function, map, t0);
  if (FLAG_debug_code) {
    __ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
              t0, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
              t0, Operand(zero_reg));
  }
  __ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));

  __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
  __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));

  __ sw(argument, FieldMemOperand(v0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
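  // The four words are exactly the map, properties, elements and value
  // stored above, so no field is left uninitialized.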

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(a0, &convert_argument);

  // Is it a String?
  __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ And(t0, a3, Operand(kIsNotStringMask));
  __ Branch(&convert_argument, ne, t0, Operand(zero_reg));
  __ mov(argument, a0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  __ Branch(&argument_is_string);

  // Invoke the conversion builtin and put the result into a2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, v0);
  __ Branch(&argument_is_string);

  // Load the empty string into a2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ Branch(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}

static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack (to survive the call) and
  // push it again as the parameter to the runtime call.
  __ Push(a1, a1);

  __ CallRuntime(function_id, 1);
  // Restore the function.
  __ Pop(a1);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
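  // Tail call the unoptimized code attached to the function's
  // SharedFunctionInfo; a1 holds the JSFunction.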
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
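  // v0 holds the Code object returned by the runtime call; jump past the
  // Code header to its first instruction.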
295 __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
296 __ Jump(at);
297}
298
299
300void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
301 // Checking whether the queued function is ready for install is optional,
302 // since we come across interrupts and stack checks elsewhere. However,
303 // not checking may delay installing ready functions, and always checking
304 // would be quite expensive. A good compromise is to first check against
305 // stack limit as a cue for an interrupt signal.
306 Label ok;
307 __ LoadRoot(t0, Heap::kStackLimitRootIndex);
308 __ Branch(&ok, hs, sp, Operand(t0));
309
310 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
311 GenerateTailCallToReturnedCode(masm);
312
313 __ bind(&ok);
314 GenerateTailCallToSharedCode(masm);
315}
316
317
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100318static void Generate_JSConstructStubHelper(MacroAssembler* masm,
319 bool is_api_function,
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000320 bool create_memento) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000321 // ----------- S t a t e -------------
322 // -- a0 : number of arguments
323 // -- a1 : constructor function
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000324 // -- a2 : allocation site or undefined
Ben Murdoch257744e2011-11-30 15:57:28 +0000325 // -- ra : return address
326 // -- sp[...]: constructor arguments
327 // -----------------------------------
328
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000329 // Should never create mementos for api functions.
330 DCHECK(!is_api_function || !create_memento);
Ben Murdoch257744e2011-11-30 15:57:28 +0000331
332 Isolate* isolate = masm->isolate();
333
334 // ----------- S t a t e -------------
335 // -- a0 : number of arguments
336 // -- a1 : constructor function
337 // -- ra : return address
338 // -- sp[...]: constructor arguments
339 // -----------------------------------
340
341 // Enter a construct frame.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100342 {
343 FrameScope scope(masm, StackFrame::CONSTRUCT);
Ben Murdoch257744e2011-11-30 15:57:28 +0000344
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000345 if (create_memento) {
346 __ AssertUndefinedOrAllocationSite(a2, a3);
347 __ push(a2);
348 }
349
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100350 // Preserve the two incoming parameters on the stack.
351 __ sll(a0, a0, kSmiTagSize); // Tag arguments count.
352 __ MultiPushReversed(a0.bit() | a1.bit());
Ben Murdoch257744e2011-11-30 15:57:28 +0000353
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100354 Label rt_call, allocated;
355 // Try to allocate the object without transitioning into C code. If any of
356 // the preconditions is not met, the code bails out to the runtime call.
357 if (FLAG_inline_new) {
358 Label undo_allocation;
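      // If the debugger is stepping into this constructor (the per-isolate
      // debug_step_in_fp slot is non-zero), take the runtime path so the
      // debugger can observe the allocation.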
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ li(a2, Operand(debug_step_in_fp));
      __ lw(a2, MemOperand(a2));
      __ Branch(&rt_call, ne, a2, Operand(zero_reg));

      // Load the initial map and verify that it is in fact a map.
      // a1: constructor function
      __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(a2, &rt_call);
      __ GetObjectType(a2, a3, t4);
      __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc), in which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // a1: constructor function
      // a2: initial map
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
      __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE));

      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 = FieldMemOperand(a2, Map::kBitField3Offset);
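        // In-object slack tracking: Map::Counter counts down on each
        // construction. A value below kSlackTrackingCounterEnd means tracking
        // has already finished; when the decrement below reaches the end
        // value, Runtime::kFinalizeInstanceSize shrinks the instance size.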
        // Check if slack tracking is enabled.
        __ lw(t0, bit_field3);
        __ DecodeField<Map::Counter>(t2, t0);
        __ Branch(&allocate, lt, t2, Operand(Map::kSlackTrackingCounterEnd));
        // Decrease generous allocation count.
        __ Subu(t0, t0, Operand(1 << Map::Counter::kShift));
        __ Branch(USE_DELAY_SLOT, &allocate, ne, t2,
                  Operand(Map::kSlackTrackingCounterEnd));
        __ sw(t0, bit_field3);  // In delay slot.

        __ Push(a1, a2, a1);  // a1 = Constructor.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ Pop(a1, a2);
        // Slack tracking counter is Map::kSlackTrackingCounterEnd after runtime
        // call.
        __ li(t2, Map::kSlackTrackingCounterEnd);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // a1: constructor function
      // a2: initial map
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ Addu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
      }

      __ Allocate(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // a1: constructor function
      // a2: initial map
      // a3: object size (not including memento if create_memento)
      // t4: JSObject (not tagged)
      __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(t5, t4);
      __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
      __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
      __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
      __ Addu(t5, t5, Operand(3 * kPointerSize));
      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      DCHECK_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      DCHECK_EQ(2 * kPointerSize, JSObject::kElementsOffset);

      // Fill all the in-object properties with appropriate filler.
      // a1: constructor function
      // a2: initial map
      // a3: object size (in words, including memento if create_memento)
      // t4: JSObject (not tagged)
      // t5: First in-object property of JSObject (not tagged)
      // t2: slack tracking counter (non-API function case)
      DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);

      // Use t7 to hold undefined, which is used in several places below.
      __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ Branch(&no_inobject_slack_tracking, lt, t2,
                  Operand(Map::kSlackTrackingCounterEnd));

        // Allocate object with a slack.
        __ lbu(a0, FieldMemOperand(a2, Map::kPreAllocatedPropertyFieldsOffset));
        __ sll(at, a0, kPointerSizeLog2);
        __ addu(a0, t5, at);
        // a0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ sll(at, a3, kPointerSizeLog2);
          __ Addu(t6, t4, Operand(at));  // End of object.
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
                    a0, Operand(t6));
        }
        __ InitializeFieldsWithFiller(t5, a0, t7);
        // Fill the remaining fields with the one-pointer filler map so the
        // object can later be truncated to its final size.
        __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);

        __ bind(&no_inobject_slack_tracking);
      }

      if (create_memento) {
        __ Subu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
        __ sll(a0, a0, kPointerSizeLog2);
        __ Addu(a0, t4, Operand(a0));  // End of object.
        __ InitializeFieldsWithFiller(t5, a0, t7);

        // Fill in memento fields.
        // t5: points to the allocated but uninitialized memento.
        __ LoadRoot(t7, Heap::kAllocationMementoMapRootIndex);
        DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
        __ sw(t7, MemOperand(t5));
        __ Addu(t5, t5, kPointerSize);
        // Load the AllocationSite.
        __ lw(t7, MemOperand(sp, 2 * kPointerSize));
        DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
        __ sw(t7, MemOperand(t5));
        __ Addu(t5, t5, kPointerSize);
      } else {
        __ sll(at, a3, kPointerSizeLog2);
        __ Addu(a0, t4, Operand(at));  // End of object.
        __ InitializeFieldsWithFiller(t5, a0, t7);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ Addu(t4, t4, Operand(kHeapObjectTag));

      // Check if a non-empty properties array is needed. Continue with the
      // allocated object if not; fall through to the runtime call if it is.
      // a1: constructor function
      // t4: JSObject
      // t5: start of next object (not tagged)
      __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
      // The instance size fields contain both pre-allocated property fields
      // and in-object properties.
      __ lbu(t6, FieldMemOperand(a2, Map::kPreAllocatedPropertyFieldsOffset));
      __ Addu(a3, a3, Operand(t6));
      __ lbu(t6, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
      __ subu(a3, a3, t6);

      // Done if no extra properties are to be allocated.
      __ Branch(&allocated, eq, a3, Operand(zero_reg));
      __ Assert(greater_equal, kPropertyAllocationCountFailed,
                a3, Operand(zero_reg));

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // a1: constructor
      // a3: number of elements in properties array
      // t4: JSObject
      // t5: start of next object
      __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize));
      __ Allocate(
          a0,
          t5,
          t6,
          a2,
          &undo_allocation,
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

      // Initialize the FixedArray.
      // a1: constructor
      // a3: number of elements in properties array (untagged)
      // t4: JSObject
      // t5: start of next object
      __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);
      __ mov(a2, t5);
      __ sw(t6, MemOperand(a2, JSObject::kMapOffset));
      __ sll(a0, a3, kSmiTagSize);
      __ sw(a0, MemOperand(a2, FixedArray::kLengthOffset));
      __ Addu(a2, a2, Operand(2 * kPointerSize));

      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      DCHECK_EQ(1 * kPointerSize, FixedArray::kLengthOffset);

      // Initialize the fields to undefined.
      // a1: constructor
      // a2: First element of FixedArray (not tagged)
      // a3: number of elements in properties array
      // t4: JSObject
      // t5: FixedArray (not tagged)
      __ sll(t3, a3, kPointerSizeLog2);
      __ addu(t6, a2, t3);  // End of object.
      DCHECK_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
      { Label loop, entry;
        if (!is_api_function || create_memento) {
          __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
        } else if (FLAG_debug_code) {
          __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
          __ Assert(eq, kUndefinedValueNotLoaded, t7, Operand(t2));
        }
        __ jmp(&entry);
        __ bind(&loop);
        __ sw(t7, MemOperand(a2));
        __ addiu(a2, a2, kPointerSize);
        __ bind(&entry);
        __ Branch(&loop, less, a2, Operand(t6));
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // a1: constructor function
      // t4: JSObject
      // t5: FixedArray (not tagged)
      __ Addu(t5, t5, Operand(kHeapObjectTag));  // Add the heap tag.
      __ sw(t5, FieldMemOperand(t4, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated.
      // a1: constructor function
      // t4: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated objects unused properties.
      // t4: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(t4, t5);
    }

    // Allocate the new receiver object using the runtime call.
    // a1: constructor function
    __ bind(&rt_call);
    if (create_memento) {
      // Get the cell or allocation site.
      __ lw(a2, MemOperand(sp, 2 * kPointerSize));
      __ push(a2);
    }

    __ push(a1);  // Argument for Runtime_NewObject.
    if (create_memento) {
      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
    } else {
      __ CallRuntime(Runtime::kNewObject, 1);
    }
    __ mov(t4, v0);

    // If we ended up using the runtime, and we want a memento, then the
    // runtime call made it for us, and we shouldn't do create count
    // increment.
    Label count_incremented;
    if (create_memento) {
      __ jmp(&count_incremented);
    }

    // Receiver for constructor call allocated.
    // t4: JSObject
    __ bind(&allocated);

    if (create_memento) {
      __ lw(a2, MemOperand(sp, kPointerSize * 2));
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      __ Branch(&count_incremented, eq, a2, Operand(t5));
      // a2 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ lw(a3, FieldMemOperand(a2,
                                AllocationSite::kPretenureCreateCountOffset));
      __ Addu(a3, a3, Operand(Smi::FromInt(1)));
      __ sw(a3, FieldMemOperand(a2,
                                AllocationSite::kPretenureCreateCountOffset));
      __ bind(&count_incremented);
    }

    __ Push(t4, t4);

    // Reload the number of arguments from the stack.
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));
    __ lw(a3, MemOperand(sp, 3 * kPointerSize));

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below.
    __ srl(a0, a3, kSmiTagSize);

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    Label loop, entry;
    __ jmp(&entry);
    __ bind(&loop);
    __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(t0, a2, Operand(t0));
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
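    // a3 is the smi-tagged argument count (kSmiTagSize == 1), so subtracting
    // 2 decrements the untagged index by one.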
    __ Addu(a3, a3, Operand(-2));
    __ Branch(&loop, greater_equal, a3, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    if (is_api_function) {
      __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(v0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ GetObjectType(v0, a1, a3);
    __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ lw(v0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ sll(t0, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, t0);
  __ Addu(sp, sp, kPointerSize);
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  __ Ret();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: code entry
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, zero_reg);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
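    // Each argv slot contains the location of a handle (Object**), hence the
    // double load below: first the handle's location, then the object it
    // refers to.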
    Label loop, entry;
    __ sll(t0, a3, kPointerSizeLog2);
    __ addu(t2, s0, t0);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code and pass argc as a0.
    __ mov(a0, a3);
    if (is_construct) {
      // No type feedback cell is available
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Leave internal frame.
  }

  __ Jump(ra);
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(a1, a1);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore the function.
  __ Pop(a1);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}



static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  // a0 - contains return address (beginning of patch sequence)
  // a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  __ Jump(a0);
}

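// Stamp out the MakeCodeYoungAgain builtins for every code age via the
// CODE_AGE_LIST X-macro; the even and odd marking variants share one body.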
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  // a0 - contains return address (beginning of patch sequence)
  // a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);

  // Perform prologue operations usually performed by the young code stub.
  __ Push(ra, fp, cp, a1);
  __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
  __ Jump(a0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across the notification; this is important for
    // compiled stubs that tail call the runtime on deopts, passing their
    // parameters in registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Notify the runtime of the stub failure (no arguments are passed).
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ Addu(sp, sp, Operand(kPointerSize));  // Ignore state.
  __ Jump(ra);  // Jump to miss handler.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> t2.
  __ lw(t2, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(t2);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register,
            ne, t2, Operand(FullCodeGenerator::NO_REGISTERS));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot: Addu will emit one instruction.
  __ Addu(sp, sp, Operand(1 * kPointerSize));  // Remove state.

  __ bind(&with_tos_register);
  __ lw(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot: Addu will emit one instruction.
  __ Addu(sp, sp, Operand(2 * kPointerSize));  // Remove state.

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ addu(v0, v0, a1);
  __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(at, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(at));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  { Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
    __ push(t2);
    __ Addu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // a0: actual number of arguments
  Label slow, non_function;
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(at, sp, at);
  __ lw(a1, MemOperand(at));
  __ JumpIfSmi(a1, &non_function);
  __ GetObjectType(a1, a2, a2);
  __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));

  // 3a. Patch the first argument if necessary when calling a function.
  // a0: actual number of arguments
  // a1: function
  Label shift_arguments;
  __ li(t0, Operand(0, RelocInfo::NONE32));  // Indicate regular JS_FUNCTION.
  { Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for natives (compiler hints already in
    // a3).
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Compute the receiver in sloppy mode.
    // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);
    __ lw(a2, MemOperand(a2, -kPointerSize));
    // a0: actual number of arguments
    // a1: function
    // a2: first argument
    __ JumpIfSmi(a2, &convert_to_object, t2);

    __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_proxy, eq, a2, Operand(a3));
    __ LoadRoot(a3, Heap::kNullValueRootIndex);
    __ Branch(&use_global_proxy, eq, a2, Operand(a3));

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a2, a3, a3);
    __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    __ bind(&convert_to_object);
    // Enter an internal frame in order to preserve argument count.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
      __ Push(a0, a2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(a2, v0);

      __ pop(a0);
      __ sra(a0, a0, kSmiTagSize);  // Un-tag.
      // Leave internal frame.
    }

    // Restore the function to a1, and the flag to t0.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(at, sp, at);
    __ lw(a1, MemOperand(at));
    __ Branch(USE_DELAY_SLOT, &patch_receiver);
    __ li(t0, Operand(0, RelocInfo::NONE32));  // In delay slot.

    __ bind(&use_global_proxy);
    __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));

    __ bind(&patch_receiver);
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a3, sp, at);
    __ sw(a2, MemOperand(a3, -kPointerSize));

    __ Branch(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ li(t0, Operand(1, RelocInfo::NONE32));  // Indicate function proxy.
  __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));

  __ bind(&non_function);
  __ li(t0, Operand(2, RelocInfo::NONE32));  // Indicate non-function.

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(a2, sp, at);
  __ sw(a1, MemOperand(a2, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);

    __ bind(&loop);
    __ lw(at, MemOperand(a2, -kPointerSize));
    __ sw(at, MemOperand(a2));
    __ Subu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Subu(a0, a0, Operand(1));
    __ Pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ Branch(&function, eq, t0, Operand(zero_reg));
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(a2, zero_reg);
    __ Branch(&non_proxy, ne, t0, Operand(1));

    __ push(a1);  // Re-add proxy object as additional argument.
    __ Addu(a0, a0, Operand(1));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

1226 // 5b. Get the code to call from the function and check that the number of
1227 // expected arguments matches what we're providing. If so, jump
1228 // (tail-call) to the code in register edx without checking arguments.
1229 // a0: actual number of arguments
1230 // a1: function
  __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ sra(a2, a2, kSmiTagSize);
  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne, a2, Operand(a0));

  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kArgsOffset = 2 * kPointerSize;
  const int kRecvOffset = 3 * kPointerSize;
  const int kFunctionOffset = 4 * kPointerSize;
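  // Layout note (a sketch, assuming the standard frame shape): the offsets
  // above are relative to fp inside the INTERNAL frame set up below.
  // kFunctionOffset, kRecvOffset and kArgsOffset reach the values the caller
  // pushed above the saved fp and return address, while kIndexOffset and
  // kLimitOffset name two expression-stack slots inside the frame that hold
  // the running index and the argument count (both smis).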

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    __ lw(a0, MemOperand(fp, kFunctionOffset));  // Get the function.
    __ push(a0);
    __ lw(a0, MemOperand(fp, kArgsOffset));  // Get the args array.
    __ push(a0);
    // Returns (in v0) number of arguments to copy to stack as Smi.
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real
    // stack limit" is checked.
    Label okay;
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    // Make a2 the space we have left. The stack might already be overflowed
    // here which will cause a2 to become negative.
    __ subu(a2, sp, a2);
    // Check if the arguments will overflow the stack.
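    // (Note: v0 holds the argument count as a smi, so the shift below both
    // untags it and scales it to bytes in a single step.)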
    __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize);
    __ Branch(&okay, gt, a2, Operand(t3));  // Signed comparison.

    // Out of stack space.
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ Push(a1, v0);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ mov(a1, zero_reg);
    __ Push(v0, a1);  // Limit and initial index.
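    // (These two pushes are the slots that kLimitOffset and kIndexOffset
    // read back in the copy loop below.)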

    // Get the receiver.
    __ lw(a0, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));

    // Change context eagerly to get the right global object if necessary.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in a1.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_proxy;
    __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native functions (the compiler
    // hints are already in a2).
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Compute the receiver in sloppy mode.
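    // (Summary of the paths below: null and undefined become the global
    // proxy, other primitives are boxed via TO_OBJECT, and anything that is
    // already a spec object is pushed unchanged.)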
    __ JumpIfSmi(a0, &call_to_object);
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ Branch(&use_global_proxy, eq, a0, Operand(a1));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_proxy, eq, a0, Operand(a2));

    // Check if the receiver is already a JavaScript object.
    // a0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a0, a1, a1);
    __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Convert the receiver to a regular object.
    // a0: receiver
    __ bind(&call_to_object);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ mov(a0, v0);  // Put object in a0 to match other paths to push_receiver.
    __ Branch(&push_receiver);

    __ bind(&use_global_proxy);
    __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalProxyOffset));

    // Push the receiver.
    // a0: receiver
    __ bind(&push_receiver);
    __ push(a0);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ lw(a0, MemOperand(fp, kIndexOffset));
    __ Branch(&entry);

    // Load the current argument from the arguments array and push it to the
    // stack.
    // a0: current argument index
    __ bind(&loop);
    __ lw(a1, MemOperand(fp, kArgsOffset));
    __ Push(a1, a0);

    // Call the runtime to access the property in the arguments array.
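    // (Note: the runtime call consumes the two values just pushed -- the
    // arguments object and the smi index -- and leaves the property value
    // in v0, which is then pushed as the next outgoing argument.)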
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ push(v0);

    // Advance the index for the next iteration of the copy loop.
    __ lw(a0, MemOperand(fp, kIndexOffset));
    __ Addu(a0, a0, Operand(1 << kSmiTagSize));
    __ sw(a0, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ bind(&entry);
    __ lw(a1, MemOperand(fp, kLimitOffset));
    __ Branch(&loop, ne, a0, Operand(a1));

    // Call the function.
    Label call_proxy;
    ParameterCount actual(a0);
    __ sra(a0, a0, kSmiTagSize);
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));

    __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());

    frame_scope.GenerateLeaveFrame();
    __ Ret(USE_DELAY_SLOT);
    __ Addu(sp, sp, Operand(3 * kPointerSize));  // In delay slot.

    // Call the function proxy.
    __ bind(&call_proxy);
    __ push(a1);  // Add function proxy as last argument.
    __ Addu(a0, a0, Operand(1));
    __ li(a2, Operand(0, RelocInfo::NONE32));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    // Tear down the internal frame and remove function, receiver and args.
  }

  __ Ret(USE_DELAY_SLOT);
  __ Addu(sp, sp, Operand(3 * kPointerSize));  // In delay slot.
}


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- a0 : actual number of arguments
  //  -- a1 : function (passed through to callee)
  //  -- a2 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real
  // stack limit" is checked.
  __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
  // Make t1 the space we have left. The stack might already be overflowed
  // here which will cause t1 to become negative.
  __ subu(t1, sp, t1);
  // Check if the arguments will overflow the stack.
  __ sll(at, a2, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, t1, Operand(at));
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ sll(a0, a0, kSmiTagSize);
  __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
  __ Addu(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
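  // Resulting frame (a sketch, assuming MultiPush stores lower-numbered
  // registers at lower addresses): smi argc, function, ARGUMENTS_ADAPTOR
  // marker, caller fp, ra, with fp pointing at the saved caller fp.  The
  // smi argc thus sits at fp - (kFixedFrameSizeFromFp + kPointerSize),
  // which is where LeaveArgumentsAdaptorFrame reloads it below.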
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                             kPointerSize)));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(sp, sp, t0);
  // Adjust for the receiver.
  __ Addu(sp, sp, Operand(kPointerSize));
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Branch(&dont_adapt_arguments, eq,
            a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address into a2.
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ sll(a2, a2, kPointerSizeLog2);
    __ subu(a2, a0, a2);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: copy end address
    // a3: code entry to call

    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ push(t0);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
    __ addiu(a0, a0, -kPointerSize);  // In delay slot.
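    // (Note: USE_DELAY_SLOT places the following instruction in the branch
    // delay slot, so the decrement executes on every iteration, including
    // the final, not-taken one.)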

    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0; the copy end address (one word
    // above fp, to account for the return address) goes into t3 below.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Addu(t3, fp, kPointerSize);
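    // (Note: the loop below pre-decrements a0 before testing against t3,
    // so ending at fp + kPointerSize makes the word at fp + 2 * kPointerSize
    // -- the last of the receiver-plus-arguments block -- the final one
    // copied.)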

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    // t3: copy end address
    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Subu(sp, sp, kPointerSize);
    __ Subu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
    __ sw(t0, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ sll(t2, a2, kPointerSizeLog2);
    __ Subu(a2, fp, Operand(t2));
    // Adjust for frame.
    __ Subu(a2, a2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));
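    // (Note: a2 is now the sp value at which the expected argument count
    // plus the receiver exactly fill the space below the frame's fixed
    // slots; the loop below pushes undefined until sp reaches it.)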

    Label fill;
    __ bind(&fill);
    __ Subu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
    __ sw(t0, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);

  __ Call(a3);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
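  // (Note: recording the pc offset here lets the deoptimizer locate the
  // return address into this adaptor when it materializes
  // arguments-adaptor frames.)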

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(a3);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ break_(0xCC);
  }
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS