blob: 2813dd4cdb202ecaf3fd40e0b34255db9a60843e [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Andrei Popescu31002712010-02-23 13:46:05 +00004
5
6
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#include "src/v8.h"
Andrei Popescu31002712010-02-23 13:46:05 +00008
Ben Murdochb8a8cc12014-11-26 15:28:44 +00009#if V8_TARGET_ARCH_MIPS
Leon Clarkef7060e22010-06-03 12:02:55 +010010
Ben Murdochb8a8cc12014-11-26 15:28:44 +000011#include "src/codegen.h"
12#include "src/debug.h"
13#include "src/deoptimizer.h"
14#include "src/full-codegen.h"
15#include "src/runtime.h"
16
Andrei Popescu31002712010-02-23 13:46:05 +000017
18namespace v8 {
19namespace internal {
20
21
22#define __ ACCESS_MASM(masm)
23
24
// Generates the adaptor trampoline that forwards a builtin call to its C++
// implementation via JumpToExternalReference, optionally pushing the called
// function as an extra argument.
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : called function (only guaranteed when
  //  --                      extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(a1);  // The called function becomes an extra stack argument.
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects s0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Addu(s0, a0, num_extra_args + 1);
  // s1 = byte size of the arguments excluding the receiver slot.
  __ sll(s1, s0, kPointerSizeLog2);
  __ Subu(s1, s1, kPointerSize);
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
55
56
Ben Murdoch3ef787d2012-04-12 10:51:47 +010057// Load the built-in InternalArray function from the current context.
58static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
59 Register result) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000060 // Load the native context.
Ben Murdoch3ef787d2012-04-12 10:51:47 +010061
Ben Murdoch3ef787d2012-04-12 10:51:47 +010062 __ lw(result,
Ben Murdochb8a8cc12014-11-26 15:28:44 +000063 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
64 __ lw(result,
65 FieldMemOperand(result, GlobalObject::kNativeContextOffset));
66 // Load the InternalArray function from the native context.
Ben Murdoch3ef787d2012-04-12 10:51:47 +010067 __ lw(result,
68 MemOperand(result,
69 Context::SlotOffset(
70 Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
71}
72
73
Ben Murdoch257744e2011-11-30 15:57:28 +000074// Load the built-in Array function from the current context.
75static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000076 // Load the native context.
Ben Murdoch257744e2011-11-30 15:57:28 +000077
Ben Murdoch257744e2011-11-30 15:57:28 +000078 __ lw(result,
Ben Murdochb8a8cc12014-11-26 15:28:44 +000079 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
80 __ lw(result,
81 FieldMemOperand(result, GlobalObject::kNativeContextOffset));
82 // Load the Array function from the native context.
Ben Murdoch257744e2011-11-30 15:57:28 +000083 __ lw(result,
Ben Murdoch3ef787d2012-04-12 10:51:47 +010084 MemOperand(result,
85 Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
Ben Murdoch257744e2011-11-30 15:57:28 +000086}
87
88
Ben Murdoch3ef787d2012-04-12 10:51:47 +010089void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
90 // ----------- S t a t e -------------
91 // -- a0 : number of arguments
92 // -- ra : return address
93 // -- sp[...]: constructor arguments
94 // -----------------------------------
95 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
96
97 // Get the InternalArray function.
98 GenerateLoadInternalArrayFunction(masm, a1);
99
100 if (FLAG_debug_code) {
101 // Initial map for the builtin InternalArray functions should be maps.
102 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000103 __ SmiTst(a2, t0);
104 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100105 t0, Operand(zero_reg));
106 __ GetObjectType(a2, a3, t0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000107 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100108 t0, Operand(MAP_TYPE));
109 }
110
111 // Run the native code for the InternalArray function called as a normal
112 // function.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000113 // Tail call a stub.
114 InternalArrayConstructorStub stub(masm->isolate());
115 __ TailCallStub(&stub);
Andrei Popescu31002712010-02-23 13:46:05 +0000116}
117
118
// Entry point for the Array function when called as a normal function;
// verifies the initial map in debug builds and tail-calls the
// ArrayConstructorStub with an undefined AllocationSite feedback (a2).
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps:
    // neither a smi nor anything other than a MAP_TYPE heap object.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);  // No AllocationSite.
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
147
148
Steve Block44f0eee2011-05-26 01:26:41 +0100149void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000150 // ----------- S t a t e -------------
151 // -- a0 : number of arguments
152 // -- a1 : constructor function
153 // -- ra : return address
154 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
155 // -- sp[argc * 4] : receiver
156 // -----------------------------------
157 Counters* counters = masm->isolate()->counters();
158 __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);
159
160 Register function = a1;
161 if (FLAG_debug_code) {
162 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000163 __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
Ben Murdoch257744e2011-11-30 15:57:28 +0000164 }
165
166 // Load the first arguments in a0 and get rid of the rest.
167 Label no_arguments;
168 __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
169 // First args = sp[(argc - 1) * 4].
170 __ Subu(a0, a0, Operand(1));
171 __ sll(a0, a0, kPointerSizeLog2);
172 __ Addu(sp, a0, sp);
173 __ lw(a0, MemOperand(sp));
174 // sp now point to args[0], drop args[0] + receiver.
175 __ Drop(2);
176
177 Register argument = a2;
178 Label not_cached, argument_is_string;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000179 __ LookupNumberStringCache(a0, // Input.
180 argument, // Result.
181 a3, // Scratch.
182 t0, // Scratch.
183 t1, // Scratch.
184 &not_cached);
Ben Murdoch257744e2011-11-30 15:57:28 +0000185 __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0);
186 __ bind(&argument_is_string);
187
188 // ----------- S t a t e -------------
189 // -- a2 : argument converted to string
190 // -- a1 : constructor function
191 // -- ra : return address
192 // -----------------------------------
193
194 Label gc_required;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000195 __ Allocate(JSValue::kSize,
196 v0, // Result.
197 a3, // Scratch.
198 t0, // Scratch.
199 &gc_required,
200 TAG_OBJECT);
Ben Murdoch257744e2011-11-30 15:57:28 +0000201
202 // Initialising the String Object.
203 Register map = a3;
204 __ LoadGlobalFunctionInitialMap(function, map, t0);
205 if (FLAG_debug_code) {
206 __ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000207 __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
Ben Murdoch257744e2011-11-30 15:57:28 +0000208 t0, Operand(JSValue::kSize >> kPointerSizeLog2));
209 __ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000210 __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
Ben Murdoch257744e2011-11-30 15:57:28 +0000211 t0, Operand(zero_reg));
212 }
213 __ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));
214
215 __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
216 __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
217 __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));
218
219 __ sw(argument, FieldMemOperand(v0, JSValue::kValueOffset));
220
221 // Ensure the object is fully initialized.
222 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
223
224 __ Ret();
225
226 // The argument was not found in the number to string cache. Check
227 // if it's a string already before calling the conversion builtin.
228 Label convert_argument;
229 __ bind(&not_cached);
230 __ JumpIfSmi(a0, &convert_argument);
231
232 // Is it a String?
233 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
234 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
Ben Murdoch589d6972011-11-30 16:04:58 +0000235 STATIC_ASSERT(kNotStringTag != 0);
Ben Murdoch257744e2011-11-30 15:57:28 +0000236 __ And(t0, a3, Operand(kIsNotStringMask));
237 __ Branch(&convert_argument, ne, t0, Operand(zero_reg));
238 __ mov(argument, a0);
239 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
240 __ Branch(&argument_is_string);
241
242 // Invoke the conversion builtin and put the result into a2.
243 __ bind(&convert_argument);
244 __ push(function); // Preserve the function.
245 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100246 {
247 FrameScope scope(masm, StackFrame::INTERNAL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000248 __ push(a0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100249 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
250 }
Ben Murdoch257744e2011-11-30 15:57:28 +0000251 __ pop(function);
252 __ mov(argument, v0);
253 __ Branch(&argument_is_string);
254
255 // Load the empty string into a2, remove the receiver from the
256 // stack, and jump back to the case where the argument is a string.
257 __ bind(&no_arguments);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000258 __ LoadRoot(argument, Heap::kempty_stringRootIndex);
Ben Murdoch257744e2011-11-30 15:57:28 +0000259 __ Drop(1);
260 __ Branch(&argument_is_string);
261
262 // At this point the argument is already a string. Call runtime to
263 // create a string wrapper.
264 __ bind(&gc_required);
265 __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100266 {
267 FrameScope scope(masm, StackFrame::INTERNAL);
268 __ push(argument);
269 __ CallRuntime(Runtime::kNewStringWrapper, 1);
270 }
Ben Murdoch257744e2011-11-30 15:57:28 +0000271 __ Ret();
Steve Block44f0eee2011-05-26 01:26:41 +0100272}
273
274
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000275static void CallRuntimePassFunction(
276 MacroAssembler* masm, Runtime::FunctionId function_id) {
277 FrameScope scope(masm, StackFrame::INTERNAL);
278 // Push a copy of the function onto the stack.
279 // Push call kind information and function as parameter to the runtime call.
280 __ Push(a1, a1);
281
282 __ CallRuntime(function_id, 1);
283 // Restore call kind information and receiver.
284 __ Pop(a1);
285}
286
287
// Tail-calls the code attached to the SharedFunctionInfo of the function
// in a1 (clobbers a2 and at).
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  // Jump past the Code object header to the first instruction.
  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
294
295
// Tail-calls the Code object returned (in v0) by a preceding runtime call.
static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  // Jump past the Code object header to the first instruction.
  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
300
301
// Entry point used while a function sits in the optimization queue: checks
// whether optimized code is ready to install, otherwise runs the shared
// (unoptimized) code.
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(t0));

  // Below the stack limit: try to install optimized code now.
  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
318
319
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100320static void Generate_JSConstructStubHelper(MacroAssembler* masm,
321 bool is_api_function,
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000322 bool create_memento) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000323 // ----------- S t a t e -------------
324 // -- a0 : number of arguments
325 // -- a1 : constructor function
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000326 // -- a2 : allocation site or undefined
Ben Murdoch257744e2011-11-30 15:57:28 +0000327 // -- ra : return address
328 // -- sp[...]: constructor arguments
329 // -----------------------------------
330
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000331 // Should never create mementos for api functions.
332 DCHECK(!is_api_function || !create_memento);
Ben Murdoch257744e2011-11-30 15:57:28 +0000333
334 Isolate* isolate = masm->isolate();
335
336 // ----------- S t a t e -------------
337 // -- a0 : number of arguments
338 // -- a1 : constructor function
339 // -- ra : return address
340 // -- sp[...]: constructor arguments
341 // -----------------------------------
342
343 // Enter a construct frame.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100344 {
345 FrameScope scope(masm, StackFrame::CONSTRUCT);
Ben Murdoch257744e2011-11-30 15:57:28 +0000346
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000347 if (create_memento) {
348 __ AssertUndefinedOrAllocationSite(a2, a3);
349 __ push(a2);
350 }
351
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100352 // Preserve the two incoming parameters on the stack.
353 __ sll(a0, a0, kSmiTagSize); // Tag arguments count.
354 __ MultiPushReversed(a0.bit() | a1.bit());
Ben Murdoch257744e2011-11-30 15:57:28 +0000355
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100356 Label rt_call, allocated;
357 // Try to allocate the object without transitioning into C code. If any of
358 // the preconditions is not met, the code bails out to the runtime call.
359 if (FLAG_inline_new) {
360 Label undo_allocation;
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100361 ExternalReference debug_step_in_fp =
362 ExternalReference::debug_step_in_fp_address(isolate);
363 __ li(a2, Operand(debug_step_in_fp));
364 __ lw(a2, MemOperand(a2));
365 __ Branch(&rt_call, ne, a2, Operand(zero_reg));
Ben Murdoch257744e2011-11-30 15:57:28 +0000366
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100367 // Load the initial map and verify that it is in fact a map.
368 // a1: constructor function
369 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
370 __ JumpIfSmi(a2, &rt_call);
371 __ GetObjectType(a2, a3, t4);
372 __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));
Ben Murdoch257744e2011-11-30 15:57:28 +0000373
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100374 // Check that the constructor is not constructing a JSFunction (see
375 // comments in Runtime_NewObject in runtime.cc). In which case the
376 // initial map's instance type would be JS_FUNCTION_TYPE.
377 // a1: constructor function
378 // a2: initial map
379 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
380 __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE));
Ben Murdoch257744e2011-11-30 15:57:28 +0000381
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000382 if (!is_api_function) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100383 Label allocate;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000384 MemOperand bit_field3 = FieldMemOperand(a2, Map::kBitField3Offset);
385 // Check if slack tracking is enabled.
386 __ lw(t0, bit_field3);
387 __ DecodeField<Map::ConstructionCount>(t2, t0);
388 __ Branch(&allocate, eq, t2, Operand(JSFunction::kNoSlackTracking));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100389 // Decrease generous allocation count.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000390 __ Subu(t0, t0, Operand(1 << Map::ConstructionCount::kShift));
391 __ Branch(USE_DELAY_SLOT,
392 &allocate, ne, t2, Operand(JSFunction::kFinishSlackTracking));
393 __ sw(t0, bit_field3); // In delay slot.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100394
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000395 __ Push(a1, a2, a1); // a1 = Constructor.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100396 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
397
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000398 __ Pop(a1, a2);
399 // Slack tracking counter is kNoSlackTracking after runtime call.
400 DCHECK(JSFunction::kNoSlackTracking == 0);
401 __ mov(t2, zero_reg);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100402
403 __ bind(&allocate);
404 }
405
406 // Now allocate the JSObject on the heap.
407 // a1: constructor function
408 // a2: initial map
409 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000410 if (create_memento) {
411 __ Addu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
412 }
413
414 __ Allocate(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100415
416 // Allocated the JSObject, now initialize the fields. Map is set to
417 // initial map and properties and elements are set to empty fixed array.
418 // a1: constructor function
419 // a2: initial map
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000420 // a3: object size (not including memento if create_memento)
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100421 // t4: JSObject (not tagged)
422 __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
423 __ mov(t5, t4);
424 __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
425 __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
426 __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
427 __ Addu(t5, t5, Operand(3*kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000428 DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
429 DCHECK_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
430 DCHECK_EQ(2 * kPointerSize, JSObject::kElementsOffset);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100431
432 // Fill all the in-object properties with appropriate filler.
433 // a1: constructor function
434 // a2: initial map
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000435 // a3: object size (in words, including memento if create_memento)
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100436 // t4: JSObject (not tagged)
437 // t5: First in-object property of JSObject (not tagged)
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000438 // t2: slack tracking counter (non-API function case)
439 DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
440
441 // Use t7 to hold undefined, which is used in several places below.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100442 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000443
444 if (!is_api_function) {
445 Label no_inobject_slack_tracking;
446
447 // Check if slack tracking is enabled.
448 __ Branch(&no_inobject_slack_tracking,
449 eq, t2, Operand(JSFunction::kNoSlackTracking));
450
451 // Allocate object with a slack.
452 __ lbu(a0, FieldMemOperand(a2, Map::kPreAllocatedPropertyFieldsOffset));
453 __ sll(at, a0, kPointerSizeLog2);
454 __ addu(a0, t5, at);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100455 // a0: offset of first field after pre-allocated fields
456 if (FLAG_debug_code) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000457 __ sll(at, a3, kPointerSizeLog2);
458 __ Addu(t6, t4, Operand(at)); // End of object.
459 __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100460 a0, Operand(t6));
461 }
462 __ InitializeFieldsWithFiller(t5, a0, t7);
Ben Murdoch257744e2011-11-30 15:57:28 +0000463 // To allow for truncation.
464 __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000465 // Fill the remaining fields with one pointer filler map.
466
467 __ bind(&no_inobject_slack_tracking);
Ben Murdoch257744e2011-11-30 15:57:28 +0000468 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000469
470 if (create_memento) {
471 __ Subu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
472 __ sll(a0, a0, kPointerSizeLog2);
473 __ Addu(a0, t4, Operand(a0)); // End of object.
474 __ InitializeFieldsWithFiller(t5, a0, t7);
475
476 // Fill in memento fields.
477 // t5: points to the allocated but uninitialized memento.
478 __ LoadRoot(t7, Heap::kAllocationMementoMapRootIndex);
479 DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
480 __ sw(t7, MemOperand(t5));
481 __ Addu(t5, t5, kPointerSize);
482 // Load the AllocationSite.
483 __ lw(t7, MemOperand(sp, 2 * kPointerSize));
484 DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
485 __ sw(t7, MemOperand(t5));
486 __ Addu(t5, t5, kPointerSize);
487 } else {
488 __ sll(at, a3, kPointerSizeLog2);
489 __ Addu(a0, t4, Operand(at)); // End of object.
490 __ InitializeFieldsWithFiller(t5, a0, t7);
491 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100492
493 // Add the object tag to make the JSObject real, so that we can continue
494 // and jump into the continuation code at any time from now on. Any
495 // failures need to undo the allocation, so that the heap is in a
496 // consistent state and verifiable.
497 __ Addu(t4, t4, Operand(kHeapObjectTag));
498
499 // Check if a non-empty properties array is needed. Continue with
500 // allocated object if not fall through to runtime call if it is.
501 // a1: constructor function
502 // t4: JSObject
503 // t5: start of next object (not tagged)
504 __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
505 // The field instance sizes contains both pre-allocated property fields
506 // and in-object properties.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000507 __ lbu(t6, FieldMemOperand(a2, Map::kPreAllocatedPropertyFieldsOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100508 __ Addu(a3, a3, Operand(t6));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000509 __ lbu(t6, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100510 __ subu(a3, a3, t6);
511
512 // Done if no extra properties are to be allocated.
513 __ Branch(&allocated, eq, a3, Operand(zero_reg));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000514 __ Assert(greater_equal, kPropertyAllocationCountFailed,
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100515 a3, Operand(zero_reg));
516
517 // Scale the number of elements by pointer size and add the header for
518 // FixedArrays to the start of the next object calculation from above.
519 // a1: constructor
520 // a3: number of elements in properties array
521 // t4: JSObject
522 // t5: start of next object
523 __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000524 __ Allocate(
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100525 a0,
526 t5,
527 t6,
528 a2,
529 &undo_allocation,
530 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
531
532 // Initialize the FixedArray.
533 // a1: constructor
534 // a3: number of elements in properties array (untagged)
535 // t4: JSObject
536 // t5: start of next object
537 __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);
538 __ mov(a2, t5);
539 __ sw(t6, MemOperand(a2, JSObject::kMapOffset));
540 __ sll(a0, a3, kSmiTagSize);
541 __ sw(a0, MemOperand(a2, FixedArray::kLengthOffset));
542 __ Addu(a2, a2, Operand(2 * kPointerSize));
543
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000544 DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
545 DCHECK_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100546
547 // Initialize the fields to undefined.
548 // a1: constructor
549 // a2: First element of FixedArray (not tagged)
550 // a3: number of elements in properties array
551 // t4: JSObject
552 // t5: FixedArray (not tagged)
553 __ sll(t3, a3, kPointerSizeLog2);
554 __ addu(t6, a2, t3); // End of object.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000555 DCHECK_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100556 { Label loop, entry;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000557 if (!is_api_function || create_memento) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100558 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
559 } else if (FLAG_debug_code) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000560 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
561 __ Assert(eq, kUndefinedValueNotLoaded, t7, Operand(t2));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100562 }
563 __ jmp(&entry);
564 __ bind(&loop);
565 __ sw(t7, MemOperand(a2));
566 __ addiu(a2, a2, kPointerSize);
567 __ bind(&entry);
568 __ Branch(&loop, less, a2, Operand(t6));
569 }
570
571 // Store the initialized FixedArray into the properties field of
572 // the JSObject.
573 // a1: constructor function
574 // t4: JSObject
575 // t5: FixedArray (not tagged)
576 __ Addu(t5, t5, Operand(kHeapObjectTag)); // Add the heap tag.
577 __ sw(t5, FieldMemOperand(t4, JSObject::kPropertiesOffset));
578
579 // Continue with JSObject being successfully allocated.
580 // a1: constructor function
581 // a4: JSObject
582 __ jmp(&allocated);
583
584 // Undo the setting of the new top so that the heap is verifiable. For
585 // example, the map's unused properties potentially do not match the
586 // allocated objects unused properties.
587 // t4: JSObject (previous new top)
588 __ bind(&undo_allocation);
589 __ UndoAllocationInNewSpace(t4, t5);
Ben Murdoch257744e2011-11-30 15:57:28 +0000590 }
591
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100592 // Allocate the new receiver object using the runtime call.
Ben Murdoch85b71792012-04-11 18:30:58 +0100593 // a1: constructor function
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000594 __ bind(&rt_call);
595 if (create_memento) {
596 // Get the cell or allocation site.
597 __ lw(a2, MemOperand(sp, 2 * kPointerSize));
598 __ push(a2);
599 }
600
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100601 __ push(a1); // Argument for Runtime_NewObject.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000602 if (create_memento) {
603 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
604 } else {
605 __ CallRuntime(Runtime::kNewObject, 1);
606 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100607 __ mov(t4, v0);
608
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000609 // If we ended up using the runtime, and we want a memento, then the
610 // runtime call made it for us, and we shouldn't do create count
611 // increment.
612 Label count_incremented;
613 if (create_memento) {
614 __ jmp(&count_incremented);
615 }
616
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100617 // Receiver for constructor call allocated.
Ben Murdoch257744e2011-11-30 15:57:28 +0000618 // t4: JSObject
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100619 __ bind(&allocated);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000620
621 if (create_memento) {
622 __ lw(a2, MemOperand(sp, kPointerSize * 2));
623 __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
624 __ Branch(&count_incremented, eq, a2, Operand(t5));
625 // a2 is an AllocationSite. We are creating a memento from it, so we
626 // need to increment the memento create count.
627 __ lw(a3, FieldMemOperand(a2,
628 AllocationSite::kPretenureCreateCountOffset));
629 __ Addu(a3, a3, Operand(Smi::FromInt(1)));
630 __ sw(a3, FieldMemOperand(a2,
631 AllocationSite::kPretenureCreateCountOffset));
632 __ bind(&count_incremented);
633 }
634
635 __ Push(t4, t4);
Ben Murdoch257744e2011-11-30 15:57:28 +0000636
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100637 // Reload the number of arguments from the stack.
638 // sp[0]: receiver
639 // sp[1]: receiver
640 // sp[2]: constructor function
641 // sp[3]: number of arguments (smi-tagged)
642 __ lw(a1, MemOperand(sp, 2 * kPointerSize));
643 __ lw(a3, MemOperand(sp, 3 * kPointerSize));
Ben Murdoch257744e2011-11-30 15:57:28 +0000644
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100645 // Set up pointer to last argument.
646 __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +0000647
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100648 // Set up number of arguments for function call below.
649 __ srl(a0, a3, kSmiTagSize);
Ben Murdoch257744e2011-11-30 15:57:28 +0000650
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100651 // Copy arguments and receiver to the expression stack.
652 // a0: number of arguments
653 // a1: constructor function
654 // a2: address of last argument (caller sp)
655 // a3: number of arguments (smi-tagged)
656 // sp[0]: receiver
657 // sp[1]: receiver
658 // sp[2]: constructor function
659 // sp[3]: number of arguments (smi-tagged)
660 Label loop, entry;
661 __ jmp(&entry);
662 __ bind(&loop);
663 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
664 __ Addu(t0, a2, Operand(t0));
665 __ lw(t1, MemOperand(t0));
666 __ push(t1);
667 __ bind(&entry);
668 __ Addu(a3, a3, Operand(-2));
669 __ Branch(&loop, greater_equal, a3, Operand(zero_reg));
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000670
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100671 // Call the function.
672 // a0: number of arguments
673 // a1: constructor function
674 if (is_api_function) {
675 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
676 Handle<Code> code =
677 masm->isolate()->builtins()->HandleApiCallConstruct();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000678 __ Call(code, RelocInfo::CODE_TARGET);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100679 } else {
680 ParameterCount actual(a0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000681 __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
Ben Murdoch257744e2011-11-30 15:57:28 +0000682 }
683
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100684 // Store offset of return address for deoptimizer.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000685 if (!is_api_function) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100686 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
687 }
Ben Murdoch257744e2011-11-30 15:57:28 +0000688
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100689 // Restore context from the frame.
690 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +0000691
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100692 // If the result is an object (in the ECMA sense), we should get rid
693 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
694 // on page 74.
695 Label use_receiver, exit;
696
697 // If the result is a smi, it is *not* an object in the ECMA sense.
698 // v0: result
699 // sp[0]: receiver (newly allocated object)
700 // sp[1]: constructor function
701 // sp[2]: number of arguments (smi-tagged)
702 __ JumpIfSmi(v0, &use_receiver);
703
704 // If the type of the result (stored in its map) is less than
705 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000706 __ GetObjectType(v0, a1, a3);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100707 __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
708
709 // Throw away the result of the constructor invocation and use the
710 // on-stack receiver as the result.
711 __ bind(&use_receiver);
712 __ lw(v0, MemOperand(sp));
713
714 // Remove receiver from the stack, remove caller arguments, and
715 // return.
716 __ bind(&exit);
717 // v0: result
718 // sp[0]: receiver (newly allocated object)
719 // sp[1]: constructor function
720 // sp[2]: number of arguments (smi-tagged)
721 __ lw(a1, MemOperand(sp, 2 * kPointerSize));
722
723 // Leave construct frame.
Ben Murdoch257744e2011-11-30 15:57:28 +0000724 }
725
Ben Murdoch257744e2011-11-30 15:57:28 +0000726 __ sll(t0, a1, kPointerSizeLog2 - 1);
727 __ Addu(sp, sp, t0);
728 __ Addu(sp, sp, kPointerSize);
729 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
730 __ Ret();
Andrei Popescu31002712010-02-23 13:46:05 +0000731}
732
733
// Builtin entry for `new F(...)` on ordinary JS functions: delegates to the
// shared construct-stub helper with is_api_function == false. Whether the
// helper counts allocations for pretenuring is controlled by the
// --pretenuring-call-new flag.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}
737
738
// Builtin entry for constructing via an API (C++-backed) function: same
// construct-stub helper, but with is_api_function == true and no
// pretenuring counting.
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}
742
743
// Shared body of the JS entry trampolines: bridges from the C++ JSEntryStub
// into JavaScript. Pushes the function and receiver, copies the argv[]
// arguments (which are handles and must be dereferenced) onto the stack,
// clears the JS callee-saved registers for the GC, and then either invokes
// the function directly or goes through the construct stub.
//
// masm         - assembler to emit into.
// is_construct - true when entering for a [[Construct]] call (uses
//                CallConstructStub), false for a regular [[Call]].
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: code entry
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, zero_reg);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ sll(t0, a3, kPointerSizeLog2);
    __ addu(t2, s0, t0);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code and pass argc as a0.
    __ mov(a0, a3);
    if (is_construct) {
      // No type feedback cell is available
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Leave internal frame.
  }

  __ Jump(ra);
}
815
816
// Trampoline used when entering JS for a regular call ([[Call]]).
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
820
821
// Trampoline used when entering JS for a construct call ([[Construct]]).
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
825
826
// Lazy-compilation builtin: calls Runtime::kCompileLazy for the function
// (passed per the CallRuntimePassFunction protocol) and then tail-calls the
// code object the runtime returned.
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}
831
832
// Emits a runtime call to Runtime::kCompileOptimized for the function in a1.
// `concurrent` selects whether compilation may happen on a background thread;
// it is passed to the runtime as a boolean literal. a1 is preserved across
// the call (pushed as the extra copy, popped afterwards).
static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(a1, a1);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore receiver.
  __ Pop(a1);
}
845
846
// Synchronous (non-concurrent) optimized-compilation builtin; tail-calls the
// code object returned by the runtime.
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}
851
852
// Concurrent (background-thread) optimized-compilation builtin; tail-calls
// the code object returned by the runtime.
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}
857
858
859
// Shared body of the Make*CodeYoungAgain builtins: rewinds a0 from the
// return address back to the start of the code-age patch sequence, calls the
// C++ make_code_young function with (sequence_address, isolate), and resumes
// execution at the (now re-youngified) sequence.
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  __ Jump(a0);
}
886
// Expands to the Generate_Make<Age>CodeYoungAgain{Even,Odd}Marking builtins
// for every age in CODE_AGE_LIST; both GC marking parities share the same
// common re-youngify stub above.
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
898
899
// Like GenerateMakeCodeYoungAgainCommon, but calls the
// mark_code_as_executed C function instead, then replays the frame prologue
// (push ra/fp/cp/a1, set up fp) that the young-code stub would have emitted
// and jumps past the code-age sequence to continue in the function body.
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);

  // Perform prologue operations usually performed by the young code stub.
  __ Push(ra, fp, cp, a1);
  __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
  __ Jump(a0);
}
933
934
// Marking code as executed twice simply re-youngifies it via the common stub.
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
938
939
// Shared body of the NotifyStubFailure builtins: notifies the runtime of a
// stub failure while preserving every JS caller-saved and callee-saved
// register, then pops the state word left on the stack and returns to the
// miss handler in ra. `save_doubles` selects whether the runtime call also
// preserves FP registers.
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ Addu(sp, sp, Operand(kPointerSize));  // Ignore state
  __ Jump(ra);  // Jump to miss handler
}
957
958
// Stub-failure notification without saving FP registers.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
962
963
// Stub-failure notification that also saves/restores FP registers.
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
967
968
// Shared body of the NotifyDeoptimized builtins: tells the runtime a deopt
// of the given bailout `type` happened, then resumes the unoptimized code.
// The full-codegen state word on top of the stack says whether a value for
// the accumulator (TOS) was also saved: NO_REGISTERS pops one word,
// TOS_REG additionally restores v0 from the second slot and pops two.
// Any other state is a bug and traps.
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> t2.
  __ lw(t2, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(t2);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register,
            ne, t2, Operand(FullCodeGenerator::NO_REGISTERS));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot Addu will emit one instruction.
  __ Addu(sp, sp, Operand(1 * kPointerSize));  // Remove state.

  __ bind(&with_tos_register);
  __ lw(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot Addu will emit one instruction.
  __ Addu(sp, sp, Operand(2 * kPointerSize));  // Remove state.

  __ bind(&unknown_state);
  __ stop("no cases left");
}
1001
1002
// Notify the runtime of an eager deoptimization.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
1006
1007
// Notify the runtime of a soft deoptimization.
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
1011
1012
// Notify the runtime of a lazy deoptimization.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
1016
1017
// On-stack replacement builtin: asks the runtime to compile the current
// frame's function for OSR. If the runtime returns Smi 0 this falls back to
// the unoptimized code; otherwise it extracts the OSR pc offset from the
// returned code object's deoptimization data and "returns" straight into
// the optimized code at that entry point.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ addu(v0, v0, a1);
  __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}
1049
1050
// Post-stack-check OSR entry: if the stack pointer is below the interrupt
// limit, calls Runtime::kStackGuard first (the limit being hit is the
// signal that recompilation may be pending), then jumps to the
// OnStackReplacement builtin; otherwise returns immediately.
void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(at, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(at));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}
1066
1067
// Builtin implementing a generic function call where the callee sits on the
// stack as the receiver: ensures at least one argument, classifies the
// callee (JS function / proxy / non-function, tracked in t0), computes the
// receiver for sloppy-mode calls (converting primitives via TO_OBJECT, or
// substituting the global proxy for null/undefined), shifts the arguments
// down one slot so the callee disappears, and finally dispatches — either
// invoking the function's code directly when the argument counts match, or
// tail-calling the arguments adaptor / CALL_FUNCTION_PROXY /
// CALL_NON_FUNCTION builtins.
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  { Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
    __ push(t2);
    __ Addu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // a0: actual number of arguments
  Label slow, non_function;
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(at, sp, at);
  __ lw(a1, MemOperand(at));
  __ JumpIfSmi(a1, &non_function);
  __ GetObjectType(a1, a2, a2);
  __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));

  // 3a. Patch the first argument if necessary when calling a function.
  // a0: actual number of arguments
  // a1: function
  Label shift_arguments;
  __ li(t0, Operand(0, RelocInfo::NONE32));  // Indicate regular JS_FUNCTION.
  { Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (Compilerhints already in a3).
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Compute the receiver in sloppy mode.
    // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);
    __ lw(a2, MemOperand(a2, -kPointerSize));
    // a0: actual number of arguments
    // a1: function
    // a2: first argument
    __ JumpIfSmi(a2, &convert_to_object, t2);

    __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_proxy, eq, a2, Operand(a3));
    __ LoadRoot(a3, Heap::kNullValueRootIndex);
    __ Branch(&use_global_proxy, eq, a2, Operand(a3));

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a2, a3, a3);
    __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    __ bind(&convert_to_object);
    // Enter an internal frame in order to preserve argument count.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
      __ Push(a0, a2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(a2, v0);

      __ pop(a0);
      __ sra(a0, a0, kSmiTagSize);  // Un-tag.
      // Leave internal frame.
    }

    // Restore the function to a1, and the flag to t0.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(at, sp, at);
    __ lw(a1, MemOperand(at));
    __ Branch(USE_DELAY_SLOT, &patch_receiver);
    __ li(t0, Operand(0, RelocInfo::NONE32));  // In delay slot.

    __ bind(&use_global_proxy);
    __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));

    __ bind(&patch_receiver);
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a3, sp, at);
    __ sw(a2, MemOperand(a3, -kPointerSize));

    __ Branch(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ li(t0, Operand(1, RelocInfo::NONE32));  // Indicate function proxy.
  __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));

  __ bind(&non_function);
  __ li(t0, Operand(2, RelocInfo::NONE32));  // Indicate non-function.

  // 3c. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(a2, sp, at);
  __ sw(a1, MemOperand(a2, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);

    __ bind(&loop);
    __ lw(at, MemOperand(a2, -kPointerSize));
    __ sw(at, MemOperand(a2));
    __ Subu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Subu(a0, a0, Operand(1));
    __ Pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ Branch(&function, eq, t0, Operand(zero_reg));
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(a2, zero_reg);
    __ Branch(&non_proxy, ne, t0, Operand(1));

    __ push(a1);  // Re-add proxy object as additional argument.
    __ Addu(a0, a0, Operand(1));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register edx without checking arguments.
  // a0: actual number of arguments
  // a1: function
  __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ sra(a2, a2, kSmiTagSize);
  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne, a2, Operand(a0));

  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}
1245
1246
// Builtin implementing Function.prototype.apply-style invocation: validates
// the arguments array via APPLY_PREPARE (which returns the element count as
// a Smi in v0), checks for stack overflow against the real stack limit,
// computes the receiver (sloppy-mode conversion / global-proxy substitution,
// mirroring Generate_FunctionCall), pushes each array element by calling
// Runtime::kGetProperty in a loop, and then invokes the function — or, for
// a proxy callee, goes through CALL_FUNCTION_PROXY and the arguments
// adaptor. Frame-local slots (index/limit) and the incoming stack layout
// are described by the kXxxOffset constants below.
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kArgsOffset = 2 * kPointerSize;
  const int kRecvOffset = 3 * kPointerSize;
  const int kFunctionOffset = 4 * kPointerSize;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    __ lw(a0, MemOperand(fp, kFunctionOffset));  // Get the function.
    __ push(a0);
    __ lw(a0, MemOperand(fp, kArgsOffset));  // Get the args array.
    __ push(a0);
    // Returns (in v0) number of arguments to copy to stack as Smi.
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    // Make a2 the space we have left. The stack might already be overflowed
    // here which will cause a2 to become negative.
    __ subu(a2, sp, a2);
    // Check if the arguments will overflow the stack.
    __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize);
    __ Branch(&okay, gt, a2, Operand(t3));  // Signed comparison.

    // Out of stack space.
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ Push(a1, v0);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ mov(a1, zero_reg);
    __ Push(v0, a1);  // Limit and initial index.

    // Get the receiver.
    __ lw(a0, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));

    // Change context eagerly to get the right global object if necessary.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in a1.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_proxy;
    __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (Compilerhints already in a2).
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(a0, &call_to_object);
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ Branch(&use_global_proxy, eq, a0, Operand(a1));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_proxy, eq, a0, Operand(a2));

    // Check if the receiver is already a JavaScript object.
    // a0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a0, a1, a1);
    __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Convert the receiver to a regular object.
    // a0: receiver
    __ bind(&call_to_object);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ mov(a0, v0);  // Put object in a0 to match other paths to push_receiver.
    __ Branch(&push_receiver);

    __ bind(&use_global_proxy);
    __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalProxyOffset));

    // Push the receiver.
    // a0: receiver
    __ bind(&push_receiver);
    __ push(a0);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ lw(a0, MemOperand(fp, kIndexOffset));
    __ Branch(&entry);

    // Load the current argument from the arguments array and push it to the
    // stack.
    // a0: current argument index
    __ bind(&loop);
    __ lw(a1, MemOperand(fp, kArgsOffset));
    __ Push(a1, a0);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ push(v0);

    // Use inline caching to access the arguments.
    __ lw(a0, MemOperand(fp, kIndexOffset));
    __ Addu(a0, a0, Operand(1 << kSmiTagSize));
    __ sw(a0, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ bind(&entry);
    __ lw(a1, MemOperand(fp, kLimitOffset));
    __ Branch(&loop, ne, a0, Operand(a1));

    // Call the function.
    Label call_proxy;
    ParameterCount actual(a0);
    __ sra(a0, a0, kSmiTagSize);
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));

    __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());

    frame_scope.GenerateLeaveFrame();
    __ Ret(USE_DELAY_SLOT);
    __ Addu(sp, sp, Operand(3 * kPointerSize));  // In delay slot.

    // Call the function proxy.
    __ bind(&call_proxy);
    __ push(a1);  // Add function proxy as last argument.
    __ Addu(a0, a0, Operand(1));
    __ li(a2, Operand(0, RelocInfo::NONE32));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    // Tear down the internal frame and remove function, receiver and args.
  }

  __ Ret(USE_DELAY_SLOT);
  __ Addu(sp, sp, Operand(3 * kPointerSize));  // In delay slot.
}
1399
1400
// Emits a stack-limit check for the arguments adaptor: jumps to
// |stack_overflow| if pushing |a2| (expected argument count) pointer-sized
// slots could run past the real stack limit.
// Clobbers t1 and at; leaves a0/a1/a2 untouched for the caller.
static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- a0 : actual number of arguments
  //  -- a1 : function (passed through to callee)
  //  -- a2 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked, not the interrupt-triggering jslimit.
  __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
  // Make t1 the space we have left. The stack might already be overflowed
  // here which will cause t1 to become negative.
  __ subu(t1, sp, t1);
  // Bytes needed for the expected arguments: a2 * kPointerSize.
  __ sll(at, a2, kPointerSizeLog2);
  // Signed comparison, so an already-negative t1 also takes the branch.
  __ Branch(stack_overflow, le, t1, Operand(at));
}
1420
1421
// Builds an ARGUMENTS_ADAPTOR frame: smi-tags the actual argument count in
// a0, then pushes ra, fp, the frame-type marker, the function (a1) and the
// smi-tagged count, and points fp at the standard fixed-frame slot so
// LeaveArgumentsAdaptorFrame can find the count again.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Smi-tag the actual argument count (a0 stays smi-tagged inside the frame).
  __ sll(a0, a0, kSmiTagSize);
  // Frame-type marker identifying this as an arguments adaptor frame.
  __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  // MultiPush stores in descending register order: ra, fp, t0 (marker),
  // a1 (function), a0 (smi argc) end up on the stack top-down.
  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
  // fp now points past the fixed frame plus the one extra (argc) slot.
  __ Addu(fp, sp,
          Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}
1429
1430
1431static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1432 // ----------- S t a t e -------------
1433 // -- v0 : result being passed through
1434 // -----------------------------------
1435 // Get the number of arguments passed (as a smi), tear down the frame and
1436 // then tear down the parameters.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001437 __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
1438 kPointerSize)));
Ben Murdoch257744e2011-11-30 15:57:28 +00001439 __ mov(sp, fp);
1440 __ MultiPop(fp.bit() | ra.bit());
1441 __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
1442 __ Addu(sp, sp, t0);
1443 // Adjust for the receiver.
1444 __ Addu(sp, sp, Operand(kPointerSize));
Andrei Popescu31002712010-02-23 13:46:05 +00001445}
1446
1447
// Trampoline that adapts the actual argument count (a0) to the callee's
// expected count (a2) before jumping to the code entry in a3:
//  - actual >= expected: re-push the first |expected| args into a new frame;
//  - actual <  expected: re-push all actual args and pad with undefined;
//  - expected == sentinel: jump straight to the code with no adaptation.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  // Functions marked "don't adapt" are called with the raw argument count.
  __ Branch(&dont_adapt_arguments, eq,
            a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless as the number of argument should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address into a2.
    // a0 is smi-tagged after EnterArgumentsAdaptorFrame, so shifting by
    // (kPointerSizeLog2 - kSmiTagSize) yields the byte offset of the last arg.
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address: only the first |expected| args are copied.
    __ sll(a2, a2, kPointerSizeLog2);
    __ subu(a2, a0, a2);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: copy end address
    // a3: code entry to call

    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ push(t0);
    // Decrement the source pointer in the branch delay slot.
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
    __ addiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address is fp.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Addu(t3, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    // t3: copy end address
    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Subu(sp, sp, kPointerSize);
    __ Subu(a0, a0, kPointerSize);
    // Store the copied word in the branch delay slot.
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
    __ sw(t0, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    // a2 becomes the sp value at which the fill loop stops: fp minus the
    // expected-args area minus the fixed frame (plus argc slot and receiver).
    __ sll(t2, a2, kPointerSizeLog2);
    __ Subu(a2, fp, Operand(t2));
    // Adjust for frame.
    __ Subu(a2, a2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ Subu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
    __ sw(t0, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);

  __ Call(a3);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(a3);

  // Stack overflow path: build a frame so the builtin has a valid context,
  // then trap; InvokeBuiltin is expected not to return normally here.
  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ break_(0xCC);
  }
}
1575
1576
1577#undef __
1578
1579} } // namespace v8::internal
1580
Leon Clarkef7060e22010-06-03 12:02:55 +01001581#endif // V8_TARGET_ARCH_MIPS