// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS64

#include "src/code-stubs.h"
#include "src/api-arguments.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/mips64/code-stubs-mips64.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


static void InitializeArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(a0, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kInternalArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(a0, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}


void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
  descriptor->Initialize(a0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}

void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}


#define __ ACCESS_MASM(masm)

static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
                                          Condition cc);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* rhs_not_nan,
                                    Label* slow,
                                    bool strict);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs);


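// Generates the lightweight miss path for a hydrogen code stub: the register
// parameters described by the stub's call interface descriptor are spilled to
// the stack inside a fresh internal frame and the given miss handler is
// invoked through CallExternalReference.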
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK((param_count == 0) ||
           a0.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments, adjust sp.
    __ Dsubu(sp, sp, Operand(param_count * kPointerSize));
    for (int i = 0; i < param_count; ++i) {
      // Store argument to stack.
      __ sd(descriptor.GetRegisterParameter(i),
            MemOperand(sp, (param_count - 1 - i) * kPointerSize));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ Ret();
}


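// DoubleToIStub converts a double stored in memory to a 32-bit integer with
// truncation semantics. The fast path (unless skip_fastpath()) uses the FPU
// trunc.w.d instruction and inspects the FCSR exception flags to detect
// overflow, underflow and invalid-operation cases; if any of those fire, the
// code falls through to a manual truncation that extracts the exponent and
// mantissa words and shifts them into place.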
void DoubleToIStub::Generate(MacroAssembler* masm) {
  Label out_of_range, only_low, negate, done;
  Register input_reg = source();
  Register result_reg = destination();

  int double_offset = offset();
  // Account for saved regs if input is sp.
  if (input_reg.is(sp)) double_offset += 3 * kPointerSize;

  Register scratch =
      GetRegisterThatIsNotOneOf(input_reg, result_reg);
  Register scratch2 =
      GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch);
  Register scratch3 =
      GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch, scratch2);
  DoubleRegister double_scratch = kLithiumScratchDouble;

  __ Push(scratch, scratch2, scratch3);
  if (!skip_fastpath()) {
    // Load double input.
    __ ldc1(double_scratch, MemOperand(input_reg, double_offset));

    // Clear cumulative exception flags and save the FCSR.
    __ cfc1(scratch2, FCSR);
    __ ctc1(zero_reg, FCSR);

    // Try a conversion to a signed integer.
    __ Trunc_w_d(double_scratch, double_scratch);
    // Move the converted value into the result register.
    __ mfc1(scratch3, double_scratch);

    // Retrieve and restore the FCSR.
    __ cfc1(scratch, FCSR);
    __ ctc1(scratch2, FCSR);

    // Check for overflow and NaNs.
    __ And(
        scratch, scratch,
        kFCSROverflowFlagMask | kFCSRUnderflowFlagMask
           | kFCSRInvalidOpFlagMask);
    // If we had no exceptions then set result_reg and we are done.
    Label error;
    __ Branch(&error, ne, scratch, Operand(zero_reg));
    __ Move(result_reg, scratch3);
    __ Branch(&done);
    __ bind(&error);
  }

  // Load the double value and perform a manual truncation.
  Register input_high = scratch2;
  Register input_low = scratch3;

  __ lw(input_low,
        MemOperand(input_reg, double_offset + Register::kMantissaOffset));
  __ lw(input_high,
        MemOperand(input_reg, double_offset + Register::kExponentOffset));

  Label normal_exponent, restore_sign;
  // Extract the biased exponent in result.
  __ Ext(result_reg,
         input_high,
         HeapNumber::kExponentShift,
         HeapNumber::kExponentBits);

  // Check for Infinity and NaNs, which should return 0.
  __ Subu(scratch, result_reg, HeapNumber::kExponentMask);
  __ Movz(result_reg, zero_reg, scratch);
  __ Branch(&done, eq, scratch, Operand(zero_reg));

  // Express exponent as delta to (number of mantissa bits + 31).
  __ Subu(result_reg,
          result_reg,
          Operand(HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 31));

  // If the delta is strictly positive, all bits would be shifted away,
  // which means that we can return 0.
  __ Branch(&normal_exponent, le, result_reg, Operand(zero_reg));
  __ mov(result_reg, zero_reg);
  __ Branch(&done);

  __ bind(&normal_exponent);
  const int kShiftBase = HeapNumber::kNonMantissaBitsInTopWord - 1;
  // Calculate shift.
  __ Addu(scratch, result_reg, Operand(kShiftBase + HeapNumber::kMantissaBits));

  // Save the sign.
  Register sign = result_reg;
  result_reg = no_reg;
  __ And(sign, input_high, Operand(HeapNumber::kSignMask));

  // On ARM shifts > 31 bits are valid and will result in zero. On MIPS we need
  // to check for this specific case.
  Label high_shift_needed, high_shift_done;
  __ Branch(&high_shift_needed, lt, scratch, Operand(32));
  __ mov(input_high, zero_reg);
  __ Branch(&high_shift_done);
  __ bind(&high_shift_needed);

  // Set the implicit 1 before the mantissa part in input_high.
  __ Or(input_high,
        input_high,
        Operand(1 << HeapNumber::kMantissaBitsInTopWord));
  // Shift the mantissa bits to the correct position.
  // We don't need to clear non-mantissa bits as they will be shifted away.
  // If they weren't, it would mean that the answer is in the 32bit range.
  __ sllv(input_high, input_high, scratch);

  __ bind(&high_shift_done);

  // Replace the shifted bits with bits from the lower mantissa word.
  Label pos_shift, shift_done;
  __ li(at, 32);
  __ subu(scratch, at, scratch);
  __ Branch(&pos_shift, ge, scratch, Operand(zero_reg));

  // Negate scratch.
  __ Subu(scratch, zero_reg, scratch);
  __ sllv(input_low, input_low, scratch);
  __ Branch(&shift_done);

  __ bind(&pos_shift);
  __ srlv(input_low, input_low, scratch);

  __ bind(&shift_done);
  __ Or(input_high, input_high, Operand(input_low));
  // Restore sign if necessary.
  __ mov(scratch, sign);
  result_reg = sign;
  sign = no_reg;
  __ Subu(result_reg, zero_reg, input_high);
  __ Movz(result_reg, input_high, scratch);

  __ bind(&done);

  __ Pop(scratch, scratch2, scratch3);
  __ Ret();
}


// Handle the case where the lhs and rhs are the same object.
// Equality is almost reflexive (everything but NaN), so this is a test
// for "identity and not NaN".
static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
                                          Condition cc) {
  Label not_identical;
  Label heap_number, return_equal;
  Register exp_mask_reg = t1;

  __ Branch(&not_identical, ne, a0, Operand(a1));

  __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask));

  // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
  // so we do the second best thing - test it ourselves.
  // They are both equal and they are not both Smis so both of them are not
  // Smis. If it's not a heap number, then return equal.
  __ GetObjectType(a0, t0, t0);
  if (cc == less || cc == greater) {
    // Call runtime on identical JSObjects.
    __ Branch(slow, greater, t0, Operand(FIRST_JS_RECEIVER_TYPE));
    // Call runtime on identical symbols since we need to throw a TypeError.
    __ Branch(slow, eq, t0, Operand(SYMBOL_TYPE));
    // Call runtime on identical SIMD values since we must throw a TypeError.
    __ Branch(slow, eq, t0, Operand(SIMD128_VALUE_TYPE));
  } else {
    __ Branch(&heap_number, eq, t0, Operand(HEAP_NUMBER_TYPE));
    // Comparing JS objects with <=, >= is complicated.
    if (cc != eq) {
      __ Branch(slow, greater, t0, Operand(FIRST_JS_RECEIVER_TYPE));
      // Call runtime on identical symbols since we need to throw a TypeError.
      __ Branch(slow, eq, t0, Operand(SYMBOL_TYPE));
      // Call runtime on identical SIMD values since we must throw a TypeError.
      __ Branch(slow, eq, t0, Operand(SIMD128_VALUE_TYPE));
      // Normally here we fall through to return_equal, but undefined is
      // special: (undefined == undefined) == true, but
      // (undefined <= undefined) == false! See ECMAScript 11.8.5.
      if (cc == less_equal || cc == greater_equal) {
        __ Branch(&return_equal, ne, t0, Operand(ODDBALL_TYPE));
        __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
        __ Branch(&return_equal, ne, a0, Operand(a6));
        DCHECK(is_int16(GREATER) && is_int16(LESS));
        __ Ret(USE_DELAY_SLOT);
        if (cc == le) {
          // undefined <= undefined should fail.
          __ li(v0, Operand(GREATER));
        } else {
          // undefined >= undefined should fail.
          __ li(v0, Operand(LESS));
        }
      }
    }
  }

  __ bind(&return_equal);
  DCHECK(is_int16(GREATER) && is_int16(LESS));
  __ Ret(USE_DELAY_SLOT);
  if (cc == less) {
    __ li(v0, Operand(GREATER));  // Things aren't less than themselves.
  } else if (cc == greater) {
    __ li(v0, Operand(LESS));     // Things aren't greater than themselves.
  } else {
    __ mov(v0, zero_reg);         // Things are <=, >=, ==, === themselves.
  }
  // For less and greater we don't have to check for NaN since the result of
  // x < x is false regardless. For the others here is some code to check
  // for NaN.
  if (cc != lt && cc != gt) {
    __ bind(&heap_number);
    // It is a heap number, so return non-equal if it's NaN and equal if it's
    // not NaN.

    // The representation of NaN values has all exponent bits (52..62) set,
    // and not all mantissa bits (0..51) clear.
    // Read top bits of double representation (second word of value).
    __ lwu(a6, FieldMemOperand(a0, HeapNumber::kExponentOffset));
    // Test that exponent bits are all set.
    __ And(a7, a6, Operand(exp_mask_reg));
    // If all bits not set (ne cond), then not a NaN, objects are equal.
    __ Branch(&return_equal, ne, a7, Operand(exp_mask_reg));

    // Shift out flag and all exponent bits, retaining only mantissa.
    __ sll(a6, a6, HeapNumber::kNonMantissaBitsInTopWord);
    // Or with all low-bits of mantissa.
    __ lwu(a7, FieldMemOperand(a0, HeapNumber::kMantissaOffset));
    __ Or(v0, a7, Operand(a6));
    // For equal we already have the right value in v0: Return zero (equal)
    // if all bits in mantissa are zero (it's an Infinity) and non-zero if
    // not (it's a NaN). For <= and >= we need to load v0 with the failing
    // value if it's a NaN.
    if (cc != eq) {
      // All-zero means Infinity means equal.
      __ Ret(eq, v0, Operand(zero_reg));
      DCHECK(is_int16(GREATER) && is_int16(LESS));
      __ Ret(USE_DELAY_SLOT);
      if (cc == le) {
        __ li(v0, Operand(GREATER));  // NaN <= NaN should fail.
      } else {
        __ li(v0, Operand(LESS));     // NaN >= NaN should fail.
      }
    }
  }
  // No fall through here.

  __ bind(&not_identical);
}


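// Compares a Smi with a non-Smi. With |strict| semantics a non-heap-number
// operand means the values cannot be equal, so non-equal is returned
// directly; otherwise the code bails out to |slow|. When both operands turn
// out to be numbers they are loaded into f12 (lhs) and f14 (rhs) and control
// continues at |both_loaded_as_doubles|.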
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* both_loaded_as_doubles,
                                    Label* slow,
                                    bool strict) {
  DCHECK((lhs.is(a0) && rhs.is(a1)) ||
         (lhs.is(a1) && rhs.is(a0)));

  Label lhs_is_smi;
  __ JumpIfSmi(lhs, &lhs_is_smi);
  // Rhs is a Smi.
  // Check whether the non-smi is a heap number.
  __ GetObjectType(lhs, t0, t0);
  if (strict) {
    // If lhs was not a number and rhs was a Smi then strict equality cannot
    // succeed. Return non-equal (lhs is already not zero).
    __ Ret(USE_DELAY_SLOT, ne, t0, Operand(HEAP_NUMBER_TYPE));
    __ mov(v0, lhs);
  } else {
    // Smi compared non-strictly with a non-Smi non-heap-number. Call
    // the runtime.
    __ Branch(slow, ne, t0, Operand(HEAP_NUMBER_TYPE));
  }
  // Rhs is a smi, lhs is a number.
  // Convert smi rhs to double.
  __ SmiUntag(at, rhs);
  __ mtc1(at, f14);
  __ cvt_d_w(f14, f14);
  __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset));

  // We now have both loaded as doubles.
  __ jmp(both_loaded_as_doubles);

  __ bind(&lhs_is_smi);
  // Lhs is a Smi. Check whether the non-smi is a heap number.
  __ GetObjectType(rhs, t0, t0);
  if (strict) {
    // If rhs was not a number and lhs was a Smi then strict equality cannot
    // succeed. Return non-equal.
    __ Ret(USE_DELAY_SLOT, ne, t0, Operand(HEAP_NUMBER_TYPE));
    __ li(v0, Operand(1));
  } else {
    // Smi compared non-strictly with a non-Smi non-heap-number. Call
    // the runtime.
    __ Branch(slow, ne, t0, Operand(HEAP_NUMBER_TYPE));
  }

  // Lhs is a smi, rhs is a number.
  // Convert smi lhs to double.
  __ SmiUntag(at, lhs);
  __ mtc1(at, f12);
  __ cvt_d_w(f12, f12);
  __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset));
  // Fall through to both_loaded_as_doubles.
}


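// Fast path for strict equality on two heap objects that are known not to be
// the same object: JS receivers, oddballs and a pair of internalized strings
// can be decided as not-equal by pointer identity alone; anything else falls
// through to the caller's slower checks.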
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs) {
  // If either operand is a JS object or an oddball value, then they are
  // not equal since their pointers are different.
  // There is no test for undetectability in strict equality.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  Label first_non_object;
  // Get the type of the first operand into a2 and compare it with
  // FIRST_JS_RECEIVER_TYPE.
  __ GetObjectType(lhs, a2, a2);
  __ Branch(&first_non_object, less, a2, Operand(FIRST_JS_RECEIVER_TYPE));

  // Return non-zero.
  Label return_not_equal;
  __ bind(&return_not_equal);
  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(1));

  __ bind(&first_non_object);
  // Check for oddballs: true, false, null, undefined.
  __ Branch(&return_not_equal, eq, a2, Operand(ODDBALL_TYPE));

  __ GetObjectType(rhs, a3, a3);
  __ Branch(&return_not_equal, greater, a3, Operand(FIRST_JS_RECEIVER_TYPE));

  // Check for oddballs: true, false, null, undefined.
  __ Branch(&return_not_equal, eq, a3, Operand(ODDBALL_TYPE));

  // Now that we have the types we might as well check for
  // internalized-internalized.
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ Or(a2, a2, Operand(a3));
  __ And(at, a2, Operand(kIsNotStringMask | kIsNotInternalizedMask));
  __ Branch(&return_not_equal, eq, at, Operand(zero_reg));
}


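// If both operands are heap numbers, loads their values into f12 and f14 and
// jumps to |both_loaded_as_doubles|. Jumps to |not_heap_numbers| when the
// first operand is not a heap number, and to |slow| when the second operand's
// map differs from the first's (i.e. only the second one is not a number).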
static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
                                       Register lhs,
                                       Register rhs,
                                       Label* both_loaded_as_doubles,
                                       Label* not_heap_numbers,
                                       Label* slow) {
  __ GetObjectType(lhs, a3, a2);
  __ Branch(not_heap_numbers, ne, a2, Operand(HEAP_NUMBER_TYPE));
  __ ld(a2, FieldMemOperand(rhs, HeapObject::kMapOffset));
  // If first was a heap number & second wasn't, go to slow case.
  __ Branch(slow, ne, a3, Operand(a2));

  // Both are heap numbers. Load them up then jump to the code we have
  // for that.
  __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset));
  __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset));

  __ jmp(both_loaded_as_doubles);
}


// Fast negative check for internalized-to-internalized equality.
static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
                                                     Register lhs, Register rhs,
                                                     Label* possible_strings,
                                                     Label* runtime_call) {
  DCHECK((lhs.is(a0) && rhs.is(a1)) ||
         (lhs.is(a1) && rhs.is(a0)));

  // a2 is object type of lhs.
  Label object_test, return_equal, return_unequal, undetectable;
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ And(at, a2, Operand(kIsNotStringMask));
  __ Branch(&object_test, ne, at, Operand(zero_reg));
  __ And(at, a2, Operand(kIsNotInternalizedMask));
  __ Branch(possible_strings, ne, at, Operand(zero_reg));
  __ GetObjectType(rhs, a3, a3);
  __ Branch(runtime_call, ge, a3, Operand(FIRST_NONSTRING_TYPE));
  __ And(at, a3, Operand(kIsNotInternalizedMask));
  __ Branch(possible_strings, ne, at, Operand(zero_reg));

  // Both are internalized. We already checked they weren't the same pointer so
  // they are not equal. Return non-equal by returning the non-zero object
  // pointer in v0.
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);  // In delay slot.

  __ bind(&object_test);
  __ ld(a2, FieldMemOperand(lhs, HeapObject::kMapOffset));
  __ ld(a3, FieldMemOperand(rhs, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(a2, Map::kBitFieldOffset));
  __ lbu(t1, FieldMemOperand(a3, Map::kBitFieldOffset));
  __ And(at, t0, Operand(1 << Map::kIsUndetectable));
  __ Branch(&undetectable, ne, at, Operand(zero_reg));
  __ And(at, t1, Operand(1 << Map::kIsUndetectable));
  __ Branch(&return_unequal, ne, at, Operand(zero_reg));

  __ GetInstanceType(a2, a2);
  __ Branch(runtime_call, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE));
  __ GetInstanceType(a3, a3);
  __ Branch(runtime_call, lt, a3, Operand(FIRST_JS_RECEIVER_TYPE));

  __ bind(&return_unequal);
  // Return non-equal by returning the non-zero object pointer in v0.
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);  // In delay slot.

  __ bind(&undetectable);
  __ And(at, t1, Operand(1 << Map::kIsUndetectable));
  __ Branch(&return_unequal, eq, at, Operand(zero_reg));

  // If both sides are JSReceivers, then the result is false according to
  // the HTML specification, which says that only comparisons with null or
  // undefined are affected by special casing for document.all.
  __ GetInstanceType(a2, a2);
  __ Branch(&return_equal, eq, a2, Operand(ODDBALL_TYPE));
  __ GetInstanceType(a3, a3);
  __ Branch(&return_unequal, ne, a3, Operand(ODDBALL_TYPE));

  __ bind(&return_equal);
  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(EQUAL));  // In delay slot.
}


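// Verifies that |input| matches the state recorded by the CompareIC (SMI or
// NUMBER); an unexpected value jumps to |fail|, which triggers a miss in the
// caller. Other recorded states are accepted without further checks.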
static void CompareICStub_CheckInputType(MacroAssembler* masm, Register input,
                                         Register scratch,
                                         CompareICState::State expected,
                                         Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail,
                DONT_DO_SMI_CHECK);
  }
  // We could be strict about internalized/string here, but as long as
  // hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


// On entry a1 (lhs) and a0 (rhs) are the values to be compared.
// On exit v0 is 0, positive or negative to indicate the result of
// the comparison.
void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Register lhs = a1;
  Register rhs = a0;
  Condition cc = GetCondition();

  Label miss;
  CompareICStub_CheckInputType(masm, lhs, a2, left(), &miss);
  CompareICStub_CheckInputType(masm, rhs, a3, right(), &miss);

  Label slow;  // Call builtin.
  Label not_smis, both_loaded_as_doubles;

  Label not_two_smis, smi_done;
  __ Or(a2, a1, a0);
  __ JumpIfNotSmi(a2, &not_two_smis);
  __ SmiUntag(a1);
  __ SmiUntag(a0);

  __ Ret(USE_DELAY_SLOT);
  __ dsubu(v0, a1, a0);
  __ bind(&not_two_smis);

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Handle the case where the objects are identical. Either returns the answer
  // or goes to slow. Only falls through if the objects were not identical.
  EmitIdenticalObjectComparison(masm, &slow, cc);

  // If either is a Smi (we know that not both are), then they can only
  // be strictly equal if the other is a HeapNumber.
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
  __ And(a6, lhs, Operand(rhs));
  __ JumpIfNotSmi(a6, &not_smis, a4);
  // One operand is a smi. EmitSmiNonsmiComparison generates code that can:
  // 1) Return the answer.
  // 2) Go to slow.
  // 3) Fall through to both_loaded_as_doubles.
  // 4) Jump to rhs_not_nan.
  // In cases 3 and 4 we have found out we were dealing with a number-number
  // comparison and the numbers have been loaded into f12 and f14 as doubles,
  // or in GP registers (a0, a1, a2, a3) depending on the presence of the FPU.
  EmitSmiNonsmiComparison(masm, lhs, rhs,
                          &both_loaded_as_doubles, &slow, strict());

  __ bind(&both_loaded_as_doubles);
  // f12, f14 are the double representations of the left hand side
  // and the right hand side if we have FPU. Otherwise a2, a3 represent
  // left hand side and a0, a1 represent right hand side.

  Label nan;
  __ li(a4, Operand(LESS));
  __ li(a5, Operand(GREATER));
  __ li(a6, Operand(EQUAL));

  // Check if either rhs or lhs is NaN.
  __ BranchF(NULL, &nan, eq, f12, f14);

  // Check if LESS condition is satisfied. If true, move conditionally
  // result to v0.
  if (kArchVariant != kMips64r6) {
    __ c(OLT, D, f12, f14);
    __ Movt(v0, a4);
    // Use previous check to store conditionally to v0 opposite condition
    // (GREATER). If rhs is equal to lhs, this will be corrected in next
    // check.
    __ Movf(v0, a5);
    // Check if EQUAL condition is satisfied. If true, move conditionally
    // result to v0.
    __ c(EQ, D, f12, f14);
    __ Movt(v0, a6);
  } else {
    Label skip;
    __ BranchF(USE_DELAY_SLOT, &skip, NULL, lt, f12, f14);
    __ mov(v0, a4);  // Return LESS as result.

    __ BranchF(USE_DELAY_SLOT, &skip, NULL, eq, f12, f14);
    __ mov(v0, a6);  // Return EQUAL as result.

    __ mov(v0, a5);  // Return GREATER as result.
    __ bind(&skip);
  }
  __ Ret();

  __ bind(&nan);
  // NaN comparisons always fail.
  // Load whatever we need in v0 to make the comparison fail.
  DCHECK(is_int16(GREATER) && is_int16(LESS));
  __ Ret(USE_DELAY_SLOT);
  if (cc == lt || cc == le) {
    __ li(v0, Operand(GREATER));
  } else {
    __ li(v0, Operand(LESS));
  }


  __ bind(&not_smis);
  // At this point we know we are dealing with two different objects,
  // and neither of them is a Smi. The objects are in lhs_ and rhs_.
  if (strict()) {
    // This returns non-equal for some object types, or falls through if it
    // was not lucky.
    EmitStrictTwoHeapObjectCompare(masm, lhs, rhs);
  }

  Label check_for_internalized_strings;
  Label flat_string_check;
  // Check for heap-number-heap-number comparison. Can jump to slow case,
  // or load both doubles and jump to the code that handles
  // that case. If the inputs are not doubles then jumps to
  // check_for_internalized_strings.
  // In this case a2 will contain the type of lhs_.
  EmitCheckForTwoHeapNumbers(masm,
                             lhs,
                             rhs,
                             &both_loaded_as_doubles,
                             &check_for_internalized_strings,
                             &flat_string_check);

  __ bind(&check_for_internalized_strings);
  if (cc == eq && !strict()) {
    // Returns an answer for two internalized strings or two
    // detectable objects.
    // Otherwise jumps to string case or not both strings case.
    // Assumes that a2 is the type of lhs_ on entry.
    EmitCheckForInternalizedStringsOrObjects(
        masm, lhs, rhs, &flat_string_check, &slow);
  }

  // Check for both being sequential one-byte strings,
  // and inline if that is the case.
  __ bind(&flat_string_check);

  __ JumpIfNonSmisNotBothSequentialOneByteStrings(lhs, rhs, a2, a3, &slow);

  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2,
                      a3);
  if (cc == eq) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, lhs, rhs, a2, a3, a4);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, a2, a3, a4,
                                                    a5);
  }
  // Never falls through to here.

  __ bind(&slow);
  if (cc == eq) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(lhs, rhs);
      __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
    }
    // Turn true into 0 and false into some non-zero value.
    STATIC_ASSERT(EQUAL == 0);
    __ LoadRoot(a0, Heap::kTrueValueRootIndex);
    __ Ret(USE_DELAY_SLOT);
    __ subu(v0, v0, a0);  // In delay slot.
  } else {
    // Prepare for call to builtin. Push object pointers, a1 (lhs) first,
    // a0 (rhs) second.
    __ Push(lhs, rhs);
    int ncr;  // NaN compare result.
    if (cc == lt || cc == le) {
      ncr = GREATER;
    } else {
      DCHECK(cc == gt || cc == ge);  // Remaining cases.
      ncr = LESS;
    }
    __ li(a0, Operand(Smi::FromInt(ncr)));
    __ push(a0);

    // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
    // tagged as a small integer.
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


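// The Store/RestoreRegistersState stubs stash the stub's own return address
// in t9, restore the caller's ra from the stack, push or pop the safepoint
// registers, and then return by jumping through t9.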
void StoreRegistersStateStub::Generate(MacroAssembler* masm) {
  __ mov(t9, ra);
  __ pop(ra);
  __ PushSafepointRegisters();
  __ Jump(t9);
}


void RestoreRegistersStateStub::Generate(MacroAssembler* masm) {
  __ mov(t9, ra);
  __ pop(ra);
  __ PopSafepointRegisters();
  __ Jump(t9);
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ MultiPush(kJSCallerSaved | ra.bit());
  if (save_doubles()) {
    __ MultiPushFPU(kCallerSavedFPU);
  }
  const int argument_count = 1;
  const int fp_argument_count = 0;
  const Register scratch = a1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
  __ li(a0, Operand(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    __ MultiPopFPU(kCallerSavedFPU);
  }

  __ MultiPop(kJSCallerSaved | ra.bit());
  __ Ret();
}


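// MathPowStub computes base^exponent into double_result (f0). Depending on
// exponent_type() the inputs arrive on the stack (ON_STACK), as a tagged
// value (TAGGED) or as an untagged integer (INTEGER). Exponents of +0.5 and
// -0.5 are special-cased as square roots, other non-integral exponents are
// handled by the C power_double_double function, and integral exponents are
// handled by the square-and-multiply loop further down.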
void MathPowStub::Generate(MacroAssembler* masm) {
  const Register base = a1;
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(a2));
  const Register heapnumbermap = a5;
  const Register heapnumber = v0;
  const DoubleRegister double_base = f2;
  const DoubleRegister double_exponent = f4;
  const DoubleRegister double_result = f0;
  const DoubleRegister double_scratch = f6;
  const FPURegister single_scratch = f8;
  const Register scratch = t1;
  const Register scratch2 = a7;

  Label call_runtime, done, int_exponent;
  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack to double registers.
    __ ld(base, MemOperand(sp, 1 * kPointerSize));
    __ ld(exponent, MemOperand(sp, 0 * kPointerSize));

    __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);

    __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
    __ ld(scratch, FieldMemOperand(base, JSObject::kMapOffset));
    __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));

    __ ldc1(double_base, FieldMemOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent);

    __ bind(&base_is_smi);
    __ mtc1(scratch, single_scratch);
    __ cvt_d_w(double_base, single_scratch);
    __ bind(&unpack_exponent);

    __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);

    __ ld(scratch, FieldMemOperand(exponent, JSObject::kMapOffset));
    __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));
    __ ldc1(double_exponent,
            FieldMemOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    // Base is already in double_base.
    __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);

    __ ldc1(double_exponent,
            FieldMemOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label int_exponent_convert;
    // Detect integer exponents stored as double.
    __ EmitFPUTruncate(kRoundToMinusInf,
                       scratch,
                       double_exponent,
                       at,
                       double_scratch,
                       scratch2,
                       kCheckForInexactConversion);
    // scratch2 == 0 means there was no conversion error.
    __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg));

    if (exponent_type() == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label not_plus_half;

      // Test for 0.5.
      __ Move(double_scratch, 0.5);
      __ BranchF(USE_DELAY_SLOT,
                 &not_plus_half,
                 NULL,
                 ne,
                 double_exponent,
                 double_scratch);
      // double_scratch can be overwritten in the delay slot.
      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      __ Move(double_scratch, static_cast<double>(-V8_INFINITY));
      __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, double_base, double_scratch);
      __ neg_d(double_result, double_scratch);

      // Add +0 to convert -0 to +0.
      __ add_d(double_scratch, double_base, kDoubleRegZero);
      __ sqrt_d(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&not_plus_half);
      __ Move(double_scratch, -0.5);
      __ BranchF(USE_DELAY_SLOT,
                 &call_runtime,
                 NULL,
                 ne,
                 double_exponent,
                 double_scratch);
      // double_scratch can be overwritten in the delay slot.
      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      __ Move(double_scratch, static_cast<double>(-V8_INFINITY));
      __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, double_base, double_scratch);
      __ Move(double_result, kDoubleRegZero);

      // Add +0 to convert -0 to +0.
      __ add_d(double_scratch, double_base, kDoubleRegZero);
      __ Move(double_result, 1.);
      __ sqrt_d(double_scratch, double_scratch);
      __ div_d(double_result, double_result, double_scratch);
      __ jmp(&done);
    }

    __ push(ra);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(0, 2, scratch2);
      __ MovToFloatParameters(double_base, double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()),
          0, 2);
    }
    __ pop(ra);
    __ MovFromFloatResult(double_result);
    __ jmp(&done);

    __ bind(&int_exponent_convert);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);

  // Get two copies of exponent in the registers scratch and exponent.
  if (exponent_type() == INTEGER) {
    __ mov(scratch, exponent);
  } else {
    // Exponent has previously been stored into scratch as untagged integer.
    __ mov(exponent, scratch);
  }

  __ mov_d(double_scratch, double_base);  // Back up base.
  __ Move(double_result, 1.0);

  // Get absolute value of exponent.
  Label positive_exponent;
  __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg));
  __ Dsubu(scratch, zero_reg, scratch);
  __ bind(&positive_exponent);
  Label while_true, no_carry, loop_end;
  __ bind(&while_true);

  __ And(scratch2, scratch, 1);

  __ Branch(&no_carry, eq, scratch2, Operand(zero_reg));
  __ mul_d(double_result, double_result, double_scratch);
  __ bind(&no_carry);

  __ dsra(scratch, scratch, 1);

  __ Branch(&loop_end, eq, scratch, Operand(zero_reg));
  __ mul_d(double_scratch, double_scratch, double_scratch);

  __ Branch(&while_true);

  __ bind(&loop_end);

  __ Branch(&done, ge, exponent, Operand(zero_reg));
  __ Move(double_scratch, 1.0);
  __ div_d(double_result, double_scratch, double_result);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ BranchF(&done, NULL, ne, double_result, kDoubleRegZero);

  // double_exponent may not contain the exponent value if the input was a
  // smi. We set it with exponent value before bailing out.
  __ mtc1(exponent, single_scratch);
  __ cvt_d_w(double_exponent, single_scratch);

  // Returning or bailing out.
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in exponent.
    __ bind(&done);
    __ AllocateHeapNumber(
        heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
    __ sdc1(double_result,
            FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
    DCHECK(heapnumber.is(v0));
    __ DropAndRet(2);
  } else {
    __ push(ra);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(0, 2, scratch);
      __ MovToFloatParameters(double_base, double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()),
          0, 2);
    }
    __ pop(ra);
    __ MovFromFloatResult(double_result);

    __ bind(&done);
    __ Ret();
  }
}


bool CEntryStub::NeedsImmovableCode() {
  return true;
}


void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  StoreRegistersStateStub::GenerateAheadOfTime(isolate);
  RestoreRegistersStateStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
  StoreFastElementStub::GenerateAheadOfTime(isolate);
  TypeofStub::GenerateAheadOfTime(isolate);
}


void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) {
  StoreRegistersStateStub stub(isolate);
  stub.GetCode();
}


void RestoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) {
  RestoreRegistersStateStub stub(isolate);
  stub.GetCode();
}


void CodeStub::GenerateFPStubs(Isolate* isolate) {
  // Generate if not already in cache.
  SaveFPRegsMode mode = kSaveFPRegs;
  CEntryStub(isolate, 1, mode).GetCode();
  StoreBufferOverflowStub(isolate, mode).GetCode();
  isolate->set_fp_stubs_generated(true);
}


void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
  stub.GetCode();
}


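// CEntryStub marks the transition from JavaScript to C++: it builds an exit
// frame, calls the C builtin whose address is in a1 (the result registers
// depend on result_size()), and checks the return value against the exception
// sentinel. On an exception it asks Runtime::kUnwindAndFindExceptionHandler
// for the pending handler and jumps straight to it.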
void CEntryStub::Generate(MacroAssembler* masm) {
  // Called from JavaScript; parameters are on stack as if calling JS function
  // a0: number of arguments including receiver
  // a1: pointer to builtin function
  // fp: frame pointer    (restored after C call)
  // sp: stack pointer    (restored as callee's sp after C call)
  // cp: current context  (C callee-saved)
  //
  // If argv_in_register():
  // a2: pointer to the first argument

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  if (argv_in_register()) {
    // Move argv into the correct register.
    __ mov(s1, a2);
  } else {
    // Compute the argv pointer in a callee-saved register.
    __ Dlsa(s1, sp, a0, kPointerSizeLog2);
    __ Dsubu(s1, s1, kPointerSize);
  }

  // Enter the exit frame that transitions from JavaScript to C++.
  FrameScope scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(save_doubles());

  // s0: number of arguments including receiver (C callee-saved)
  // s1: pointer to first argument (C callee-saved)
  // s2: pointer to builtin function (C callee-saved)

  // Prepare arguments for C routine.
  // a0 = argc
  __ mov(s0, a0);
  __ mov(s2, a1);

  // We are calling compiled C/C++ code. a0 and a1 hold our two arguments. We
  // also need to reserve the 4 argument slots on the stack.

  __ AssertStackIsAligned();

  int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  int result_stack_size;
  if (result_size() <= 2) {
    // a0 = argc, a1 = argv, a2 = isolate
    __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
    __ mov(a1, s1);
    result_stack_size = 0;
  } else {
    DCHECK_EQ(3, result_size());
    // Allocate additional space for the result.
    result_stack_size =
        ((result_size() * kPointerSize) + frame_alignment_mask) &
        ~frame_alignment_mask;
    __ Dsubu(sp, sp, Operand(result_stack_size));

    // a0 = hidden result argument, a1 = argc, a2 = argv, a3 = isolate.
    __ li(a3, Operand(ExternalReference::isolate_address(isolate())));
    __ mov(a2, s1);
    __ mov(a1, a0);
    __ mov(a0, sp);
  }

  // To let the GC traverse the return address of the exit frames, we need to
  // know where the return address is. The CEntryStub is unmovable, so
  // we can store the address on the stack to be able to find it again and
  // we never have to restore it, because it will not change.
  { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm);
    int kNumInstructionsToJump = 4;
    Label find_ra;
    // Adjust the value in ra to point to the correct return location, 2nd
    // instruction past the real call into C code (the jalr(t9)), and push it.
    // This is the return address of the exit frame.
    if (kArchVariant >= kMips64r6) {
      __ addiupc(ra, kNumInstructionsToJump + 1);
    } else {
      // This branch-and-link sequence is needed to find the current PC on mips
      // before r6, saved to the ra register.
      __ bal(&find_ra);  // bal exposes branch delay slot.
      __ Daddu(ra, ra, kNumInstructionsToJump * Instruction::kInstrSize);
    }
    __ bind(&find_ra);

    // This spot was reserved in EnterExitFrame.
    __ sd(ra, MemOperand(sp, result_stack_size));
    // Stack space reservation moved to the branch delay slot below.
    // Stack is still aligned.

    // Call the C routine.
    __ mov(t9, s2);  // Function pointer to t9 to conform to ABI for PIC.
    __ jalr(t9);
    // Set up sp in the delay slot.
    __ daddiu(sp, sp, -kCArgsSlotsSize);
    // Make sure the stored 'ra' points to this position.
    DCHECK_EQ(kNumInstructionsToJump,
              masm->InstructionsGeneratedSince(&find_ra));
  }
  if (result_size() > 2) {
    DCHECK_EQ(3, result_size());
    // Read result values stored on stack.
    __ ld(a0, MemOperand(v0, 2 * kPointerSize));
    __ ld(v1, MemOperand(v0, 1 * kPointerSize));
    __ ld(v0, MemOperand(v0, 0 * kPointerSize));
  }
  // Result returned in v0, v1:v0 or a0:v1:v0 - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ LoadRoot(a4, Heap::kExceptionRootIndex);
  __ Branch(&exception_returned, eq, a4, Operand(v0));

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    ExternalReference pending_exception_address(
        Isolate::kPendingExceptionAddress, isolate());
    __ li(a2, Operand(pending_exception_address));
    __ ld(a2, MemOperand(a2));
    __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
    // Cannot use check here as it attempts to generate call into runtime.
    __ Branch(&okay, eq, a4, Operand(a2));
    __ stop("Unexpected pending exception");
    __ bind(&okay);
  }

  // Exit C frame and return.
  // v0:v1: result
  // sp: stack pointer
  // fp: frame pointer
  Register argc;
  if (argv_in_register()) {
    // We don't want to pop arguments so set argc to no_reg.
    argc = no_reg;
  } else {
    // s0: still holds argc (callee-saved).
    argc = s0;
  }
  __ LeaveExitFrame(save_doubles(), argc, true, EMIT_RETURN);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address(
      Isolate::kPendingHandlerContextAddress, isolate());
  ExternalReference pending_handler_code_address(
      Isolate::kPendingHandlerCodeAddress, isolate());
  ExternalReference pending_handler_offset_address(
      Isolate::kPendingHandlerOffsetAddress, isolate());
  ExternalReference pending_handler_fp_address(
      Isolate::kPendingHandlerFPAddress, isolate());
  ExternalReference pending_handler_sp_address(
      Isolate::kPendingHandlerSPAddress, isolate());

  // Ask the runtime for help to determine the handler. This will set v0 to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
                                 isolate());
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, 0, a0);
    __ mov(a0, zero_reg);
    __ mov(a1, zero_reg);
    __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ li(cp, Operand(pending_handler_context_address));
  __ ld(cp, MemOperand(cp));
  __ li(sp, Operand(pending_handler_sp_address));
  __ ld(sp, MemOperand(sp));
  __ li(fp, Operand(pending_handler_fp_address));
  __ ld(fp, MemOperand(fp));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (cp == 0) for non-JS frames.
  Label zero;
  __ Branch(&zero, eq, cp, Operand(zero_reg));
  __ sd(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ bind(&zero);

  // Compute the handler entry address and jump to it.
  __ li(a1, Operand(pending_handler_code_address));
  __ ld(a1, MemOperand(a1));
  __ li(a2, Operand(pending_handler_offset_address));
  __ ld(a2, MemOperand(a2));
  __ Daddu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Daddu(t9, a1, a2);
  __ Jump(t9);
}


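// JSEntryStub is the entry point used when C++ calls into JavaScript: it
// saves the callee-saved CPU and FPU registers, builds an EntryFrame (marking
// the outermost JS frame if needed), installs a stack handler that catches
// any escaping exception, and then calls the JSEntryTrampoline (or the
// construct variant) to invoke the function.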
void JSEntryStub::Generate(MacroAssembler* masm) {
  Label invoke, handler_entry, exit;
  Isolate* isolate = masm->isolate();

  // TODO(plind): unify the ABI description here.
  // Registers:
  // a0: entry address
  // a1: function
  // a2: receiver
  // a3: argc
  // a4: argv (on mips64)

  // Stack:
  // 0 arg slots on mips64 (4 args slots on mips)
  // args -- in a4 on mips64, on stack on mips

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Save callee saved registers on the stack.
  __ MultiPush(kCalleeSaved | ra.bit());

  // Save callee-saved FPU registers.
  __ MultiPushFPU(kCalleeSavedFPU);
  // Set up the reserved register for 0.0.
  __ Move(kDoubleRegZero, 0.0);

  // Load argv in s0 register.
  __ mov(s0, a4);  // 5th parameter (argv) is passed in the a4 register on mips64.

  __ InitializeRootRegister();

  // We build an EntryFrame.
  __ li(a7, Operand(-1));  // Push a bad frame pointer to fail if it is used.
  int marker = type();
  __ li(a6, Operand(Smi::FromInt(marker)));
  __ li(a5, Operand(Smi::FromInt(marker)));
  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate);
  __ li(a4, Operand(c_entry_fp));
  __ ld(a4, MemOperand(a4));
  __ Push(a7, a6, a5, a4);
  // Set up frame pointer for the frame to be pushed.
  __ daddiu(fp, sp, -EntryFrameConstants::kCallerFPOffset);

  // Registers:
  // a0: entry_address
  // a1: function
  // a2: receiver_pointer
  // a3: argc
  // s0: argv
  //
  // Stack:
  // caller fp          |
  // function slot      | entry frame
  // context slot       |
  // bad fp (0xff...f)  |
  // callee saved registers + ra
  // [ O32: 4 args slots]
  // args

  // If this is the outermost JS call, set js_entry_sp value.
  Label non_outermost_js;
  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
  __ li(a5, Operand(ExternalReference(js_entry_sp)));
  __ ld(a6, MemOperand(a5));
  __ Branch(&non_outermost_js, ne, a6, Operand(zero_reg));
  __ sd(fp, MemOperand(a5));
  __ li(a4, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  Label cont;
  __ b(&cont);
  __ nop();   // Branch delay slot nop.
  __ bind(&non_outermost_js);
  __ li(a4, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
  __ bind(&cont);
  __ push(a4);

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();
  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel. Coming in here the
  // fp will be invalid because the PushStackHandler below sets it to 0 to
  // signal the existence of the JSEntry frame.
  __ li(a4, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
                                      isolate)));
  __ sd(v0, MemOperand(a4));  // We come back from 'invoke'. result is in v0.
  __ LoadRoot(v0, Heap::kExceptionRootIndex);
  __ b(&exit);  // b exposes branch delay slot.
  __ nop();   // Branch delay slot nop.

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler();
  // If an exception not caught by another handler occurs, this handler
  // returns control to the code after the bal(&invoke) above, which
  // restores all kCalleeSaved registers (including cp and fp) to their
  // saved values before returning a failure to C.

  // Clear any pending exceptions.
  __ LoadRoot(a5, Heap::kTheHoleValueRootIndex);
  __ li(a4, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
                                      isolate)));
  __ sd(a5, MemOperand(a4));

  // Invoke the function by calling through JS entry trampoline builtin.
  // Notice that we cannot store a reference to the trampoline code directly in
  // this stub, because runtime stubs are not traversed when doing GC.

  // Registers:
  // a0: entry_address
  // a1: function
  // a2: receiver_pointer
  // a3: argc
  // s0: argv
  //
  // Stack:
  // handler frame
  // entry frame
  // callee saved registers + ra
  // [ O32: 4 args slots]
  // args

  if (type() == StackFrame::ENTRY_CONSTRUCT) {
    ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
                                      isolate);
    __ li(a4, Operand(construct_entry));
  } else {
    ExternalReference entry(Builtins::kJSEntryTrampoline, masm->isolate());
    __ li(a4, Operand(entry));
  }
  __ ld(t9, MemOperand(a4));  // Deref address.
  // Call JSEntryTrampoline.
  __ daddiu(t9, t9, Code::kHeaderSize - kHeapObjectTag);
  __ Call(t9);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);  // v0 holds result
  // Check if the current stack frame is marked as the outermost JS frame.
  Label non_outermost_js_2;
  __ pop(a5);
  __ Branch(&non_outermost_js_2,
            ne,
            a5,
            Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ li(a5, Operand(ExternalReference(js_entry_sp)));
  __ sd(zero_reg, MemOperand(a5));
  __ bind(&non_outermost_js_2);

  // Restore the top frame descriptors from the stack.
  __ pop(a5);
  __ li(a4, Operand(ExternalReference(Isolate::kCEntryFPAddress,
                                      isolate)));
  __ sd(a5, MemOperand(a4));

  // Reset the stack to the callee saved registers.
  __ daddiu(sp, sp, -EntryFrameConstants::kCallerFPOffset);

  // Restore callee-saved fpu registers.
  __ MultiPopFPU(kCalleeSavedFPU);

  // Restore callee saved registers from the stack.
  __ MultiPop(kCalleeSaved | ra.bit());
  // Return.
  __ Jump(ra);
}


Emily Bernierd0a1eb72015-03-24 16:35:39 -04001424void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
1425 // Return address is in ra.
1426 Label miss;
1427
1428 Register receiver = LoadDescriptor::ReceiverRegister();
1429 Register index = LoadDescriptor::NameRegister();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001430 Register scratch = a5;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001431 Register result = v0;
1432 DCHECK(!scratch.is(receiver) && !scratch.is(index));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001433 DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001434
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001435 StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
1436 &miss, // When not a string.
1437 &miss, // When not a number.
1438 &miss, // When index out of range.
1439 STRING_INDEX_IS_ARRAY_INDEX,
1440 RECEIVER_IS_STRING);
1441 char_at_generator.GenerateFast(masm);
1442 __ Ret();
1443
1444 StubRuntimeCallHelper call_helper;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001445 char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001446
1447 __ bind(&miss);
1448 PropertyAccessCompiler::TailCallBuiltin(
1449 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
1450}
1451
1452
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001453void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
1454 Label miss;
1455 Register receiver = LoadDescriptor::ReceiverRegister();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001456 // Ensure that the vector and slot registers won't be clobbered before
1457 // calling the miss handler.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001458 DCHECK(!AreAliased(a4, a5, LoadWithVectorDescriptor::VectorRegister(),
1459 LoadWithVectorDescriptor::SlotRegister()));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001460
1461 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, a4,
1462 a5, &miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001463 __ bind(&miss);
1464 PropertyAccessCompiler::TailCallBuiltin(
1465 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
1466}
1467
1468
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001469void RegExpExecStub::Generate(MacroAssembler* masm) {
1470 // Just jump directly to the runtime if native RegExp is not selected at
1471 // compile time, or if the regexp entry in generated code is turned off by a
1472 // runtime switch or at compilation.
1473#ifdef V8_INTERPRETED_REGEXP
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001474 __ TailCallRuntime(Runtime::kRegExpExec);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001475#else // V8_INTERPRETED_REGEXP
1476
1477 // Stack frame on entry.
1478 // sp[0]: last_match_info (expected JSArray)
1479 // sp[8]: previous index
1480 // sp[16]: subject string
1481 // sp[24]: JSRegExp object
1482
1483 const int kLastMatchInfoOffset = 0 * kPointerSize;
1484 const int kPreviousIndexOffset = 1 * kPointerSize;
1485 const int kSubjectOffset = 2 * kPointerSize;
1486 const int kJSRegExpOffset = 3 * kPointerSize;
1487
1488 Label runtime;
1489 // Allocation of registers for this function. These are in callee save
1490 // registers and will be preserved by the call to the native RegExp code, as
1491 // this code is called using the normal C calling convention. When calling
1492 // directly from generated code the native RegExp code will not do a GC and
1493 // therefore the content of these registers are safe to use after the call.
1494 // MIPS - using s0..s2, since we are not using CEntry Stub.
1495 Register subject = s0;
1496 Register regexp_data = s1;
1497 Register last_match_info_elements = s2;
1498
1499 // Ensure that a RegExp stack is allocated.
1500 ExternalReference address_of_regexp_stack_memory_address =
1501 ExternalReference::address_of_regexp_stack_memory_address(
1502 isolate());
1503 ExternalReference address_of_regexp_stack_memory_size =
1504 ExternalReference::address_of_regexp_stack_memory_size(isolate());
1505 __ li(a0, Operand(address_of_regexp_stack_memory_size));
1506 __ ld(a0, MemOperand(a0, 0));
1507 __ Branch(&runtime, eq, a0, Operand(zero_reg));
1508
1509 // Check that the first argument is a JSRegExp object.
1510 __ ld(a0, MemOperand(sp, kJSRegExpOffset));
1511 STATIC_ASSERT(kSmiTag == 0);
1512 __ JumpIfSmi(a0, &runtime);
1513 __ GetObjectType(a0, a1, a1);
1514 __ Branch(&runtime, ne, a1, Operand(JS_REGEXP_TYPE));
1515
1516 // Check that the RegExp has been compiled (data contains a fixed array).
1517 __ ld(regexp_data, FieldMemOperand(a0, JSRegExp::kDataOffset));
1518 if (FLAG_debug_code) {
1519 __ SmiTst(regexp_data, a4);
1520 __ Check(nz,
1521 kUnexpectedTypeForRegExpDataFixedArrayExpected,
1522 a4,
1523 Operand(zero_reg));
1524 __ GetObjectType(regexp_data, a0, a0);
1525 __ Check(eq,
1526 kUnexpectedTypeForRegExpDataFixedArrayExpected,
1527 a0,
1528 Operand(FIXED_ARRAY_TYPE));
1529 }
1530
1531 // regexp_data: RegExp data (FixedArray)
1532 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
1533 __ ld(a0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
1534 __ Branch(&runtime, ne, a0, Operand(Smi::FromInt(JSRegExp::IRREGEXP)));
1535
1536 // regexp_data: RegExp data (FixedArray)
1537 // Check that the number of captures fit in the static offsets vector buffer.
1538 __ ld(a2,
1539 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
1540 // Check (number_of_captures + 1) * 2 <= offsets vector size
1541 // Or number_of_captures * 2 <= offsets vector size - 2
1542 // Or number_of_captures <= offsets vector size / 2 - 1
1543 // Multiplying by 2 comes for free since a2 is smi-tagged.
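 // For example, an offsets vector of 2 * N slots can describe the whole match
 // plus at most N - 1 captures, since each (start, end) pair uses two slots.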
1544 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
1545 int temp = Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1;
1546 __ Branch(&runtime, hi, a2, Operand(Smi::FromInt(temp)));
1547
1548 // Reset offset for possibly sliced string.
1549 __ mov(t0, zero_reg);
1550 __ ld(subject, MemOperand(sp, kSubjectOffset));
1551 __ JumpIfSmi(subject, &runtime);
1552 __ mov(a3, subject); // Make a copy of the original subject string.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001553
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001554 // subject: subject string
1555 // a3: subject string
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001556 // regexp_data: RegExp data (FixedArray)
1557 // Handle subject string according to its encoding and representation:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001558 // (1) Sequential string? If yes, go to (4).
1559 // (2) Sequential or cons? If not, go to (5).
1560 // (3) Cons string. If the string is flat, replace subject with first string
1561 // and go to (1). Otherwise bail out to runtime.
1562 // (4) Sequential string. Load regexp code according to encoding.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001563 // (E) Carry on.
1564 /// [...]
1565
1566 // Deferred code at the end of the stub:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001567 // (5) Long external string? If not, go to (7).
1568 // (6) External string. Make it, offset-wise, look like a sequential string.
1569 // Go to (4).
1570 // (7) Short external string or not a string? If yes, bail out to runtime.
1571 // (8) Sliced string. Replace subject with parent. Go to (1).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001572
Ben Murdoch097c5b22016-05-18 11:27:45 +01001573 Label check_underlying; // (1)
1574 Label seq_string; // (4)
1575 Label not_seq_nor_cons; // (5)
1576 Label external_string; // (6)
1577 Label not_long_external; // (7)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001578
Ben Murdoch097c5b22016-05-18 11:27:45 +01001579 __ bind(&check_underlying);
1580 __ ld(a2, FieldMemOperand(subject, HeapObject::kMapOffset));
1581 __ lbu(a0, FieldMemOperand(a2, Map::kInstanceTypeOffset));
1582
1583 // (1) Sequential string? If yes, go to (4).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001584 __ And(a1,
1585 a0,
1586 Operand(kIsNotStringMask |
1587 kStringRepresentationMask |
1588 kShortExternalStringMask));
1589 STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001590 __ Branch(&seq_string, eq, a1, Operand(zero_reg)); // Go to (4).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001591
Ben Murdoch097c5b22016-05-18 11:27:45 +01001592 // (2) Sequential or cons? If not, go to (5).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001593 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
1594 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
1595 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
1596 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001597 // Go to (5).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001598 __ Branch(&not_seq_nor_cons, ge, a1, Operand(kExternalStringTag));
1599
1600 // (3) Cons string. Check that it's flat.
1601 // Replace subject with first string and reload instance type.
1602 __ ld(a0, FieldMemOperand(subject, ConsString::kSecondOffset));
1603 __ LoadRoot(a1, Heap::kempty_stringRootIndex);
1604 __ Branch(&runtime, ne, a0, Operand(a1));
1605 __ ld(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001606 __ jmp(&check_underlying);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001607
Ben Murdoch097c5b22016-05-18 11:27:45 +01001608 // (4) Sequential string. Load regexp code according to encoding.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001609 __ bind(&seq_string);
1610 // subject: sequential subject string (or look-alike, external string)
1611 // a3: original subject string
1612 // Load previous index and check range before a3 is overwritten. We have to
1613 // use a3 instead of subject here because subject might have been only made
1614 // to look like a sequential string when it actually is an external string.
1615 __ ld(a1, MemOperand(sp, kPreviousIndexOffset));
1616 __ JumpIfNotSmi(a1, &runtime);
1617 __ ld(a3, FieldMemOperand(a3, String::kLengthOffset));
1618 __ Branch(&runtime, ls, a3, Operand(a1));
1619 __ SmiUntag(a1);
1620
1621 STATIC_ASSERT(kStringEncodingMask == 4);
1622 STATIC_ASSERT(kOneByteStringTag == 4);
1623 STATIC_ASSERT(kTwoByteStringTag == 0);
1624 __ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for one_byte.
1625 __ ld(t9, FieldMemOperand(regexp_data, JSRegExp::kDataOneByteCodeOffset));
1626 __ dsra(a3, a0, 2); // a3 is 1 for one_byte, 0 for UC16 (used below).
1627 __ ld(a5, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset));
1628 __ Movz(t9, a5, a0); // If UC16 (a0 is 0), replace t9 w/kDataUC16CodeOffset.
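  // t9 now holds the code field matching the subject encoding (one-byte when
  // a0 is non-zero, UC16 otherwise); it may still be a smi if the code was
  // flushed, which is checked below.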
1629
1630 // (E) Carry on. String handling is done.
1631 // t9: irregexp code
1632 // Check that the irregexp code has been generated for the actual string
1633 // encoding. If it has, the field contains a code object; otherwise it contains
1634 // a smi (code flushing support).
1635 __ JumpIfSmi(t9, &runtime);
1636
1637 // a1: previous index
1638 // a3: encoding of subject string (1 if one_byte, 0 if two_byte);
1639 // t9: code
1640 // subject: Subject string
1641 // regexp_data: RegExp data (FixedArray)
1642 // All checks done. Now push arguments for native regexp code.
1643 __ IncrementCounter(isolate()->counters()->regexp_entry_native(),
1644 1, a0, a2);
1645
1646 // Isolates: note we add an additional parameter here (isolate pointer).
1647 const int kRegExpExecuteArguments = 9;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001648 const int kParameterRegisters = 8;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001649 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
1650
1651 // Stack pointer now points to cell where return address is to be written.
1652 // Arguments are before that on the stack or in registers, meaning we
1653 // treat the return address as argument 5. Thus every argument after that
1654 // needs to be shifted back by 1. Since DirectCEntryStub will handle
1655 // allocating space for the C argument slots, we don't need to calculate
1656 // that into the argument positions on the stack. This is how the stack will
1657 // look (sp meaning the value of sp at this moment):
1658 // Abi n64:
1659 // [sp + 1] - Argument 9
1660 // [sp + 0] - saved ra
1661 // Abi O32:
1662 // [sp + 5] - Argument 9
1663 // [sp + 4] - Argument 8
1664 // [sp + 3] - Argument 7
1665 // [sp + 2] - Argument 6
1666 // [sp + 1] - Argument 5
1667 // [sp + 0] - saved ra
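 // On the n64 ABI the first eight arguments travel in a0..a7 (hence
 // kParameterRegisters == 8 above), so only argument 9 needs a stack slot;
 // O32 passes arguments 5 through 9 on the stack.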
1668
Ben Murdoch097c5b22016-05-18 11:27:45 +01001669 // Argument 9: Pass current isolate address.
1670 __ li(a0, Operand(ExternalReference::isolate_address(isolate())));
1671 __ sd(a0, MemOperand(sp, 1 * kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001672
Ben Murdoch097c5b22016-05-18 11:27:45 +01001673 // Argument 8: Indicate that this is a direct call from JavaScript.
1674 __ li(a7, Operand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001675
Ben Murdoch097c5b22016-05-18 11:27:45 +01001676 // Argument 7: Start (high end) of backtracking stack memory area.
1677 __ li(a0, Operand(address_of_regexp_stack_memory_address));
1678 __ ld(a0, MemOperand(a0, 0));
1679 __ li(a2, Operand(address_of_regexp_stack_memory_size));
1680 __ ld(a2, MemOperand(a2, 0));
1681 __ daddu(a6, a0, a2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001682
Ben Murdoch097c5b22016-05-18 11:27:45 +01001683 // Argument 6: Set the number of capture registers to zero to force global
1684 // regexps to behave as non-global. This does not affect non-global regexps.
1685 __ mov(a5, zero_reg);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001686
Ben Murdoch097c5b22016-05-18 11:27:45 +01001687 // Argument 5: static offsets vector buffer.
1688 __ li(
1689 a4,
1690 Operand(ExternalReference::address_of_static_offsets_vector(isolate())));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001691
1692 // For arguments 4 and 3 get string length, calculate start of string data
1693 // and calculate the shift of the index (0 for one_byte and 1 for two byte).
1694 __ Daddu(t2, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag));
1695 __ Xor(a3, a3, Operand(1)); // 1 for 2-byte str, 0 for 1-byte.
1696 // Load the length from the original subject string from the previous stack
1697 // frame. Therefore we have to use fp, which points exactly to two pointer
1698 // sizes below the previous sp. (Because creating a new stack frame pushes
1699 // the previous fp onto the stack and moves up sp by 2 * kPointerSize.)
1700 __ ld(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
1701 // If slice offset is not 0, load the length from the original sliced string.
1702 // Argument 4, a3: End of string data
1703 // Argument 3, a2: Start of string data
1704 // Prepare start and end index of the input.
1705 __ dsllv(t1, t0, a3);
1706 __ daddu(t0, t2, t1);
1707 __ dsllv(t1, a1, a3);
1708 __ daddu(a2, t0, t1);
1709
1710 __ ld(t2, FieldMemOperand(subject, String::kLengthOffset));
1711
1712 __ SmiUntag(t2);
1713 __ dsllv(t1, t2, a3);
1714 __ daddu(a3, t0, t1);
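  // Argument 3 (a2) now points at the first character to scan (data start plus
  // slice offset plus previous index) and argument 4 (a3) one past the end of
  // the subject characters, both measured in bytes.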
1715 // Argument 2 (a1): Previous index.
1716 // Already there
1717
1718 // Argument 1 (a0): Subject string.
1719 __ mov(a0, subject);
1720
1721 // Locate the code entry and call it.
1722 __ Daddu(t9, t9, Operand(Code::kHeaderSize - kHeapObjectTag));
1723 DirectCEntryStub stub(isolate());
1724 stub.GenerateCall(masm, t9);
1725
1726 __ LeaveExitFrame(false, no_reg, true);
1727
1728 // v0: result
1729 // subject: subject string (callee saved)
1730 // regexp_data: RegExp data (callee saved)
1731 // last_match_info_elements: Last match info elements (callee saved)
1732 // Check the result.
1733 Label success;
1734 __ Branch(&success, eq, v0, Operand(1));
1735 // We expect exactly one result since we force the called regexp to behave
1736 // as non-global.
1737 Label failure;
1738 __ Branch(&failure, eq, v0, Operand(NativeRegExpMacroAssembler::FAILURE));
1739 // If not exception it can only be retry. Handle that in the runtime system.
1740 __ Branch(&runtime, ne, v0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
1741 // Result must now be exception. If there is no pending exception already, a
1742 // stack overflow (on the backtrack stack) was detected in RegExp code, but
1743 // the exception has not been created yet. Handle that in the runtime system.
1744 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
1745 __ li(a1, Operand(isolate()->factory()->the_hole_value()));
1746 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1747 isolate())));
1748 __ ld(v0, MemOperand(a2, 0));
1749 __ Branch(&runtime, eq, v0, Operand(a1));
1750
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001751 // For exception, throw the exception again.
1752 __ TailCallRuntime(Runtime::kRegExpExecReThrow);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001753
1754 __ bind(&failure);
1755 // For failure, return null (the exception case was re-thrown above).
1756 __ li(v0, Operand(isolate()->factory()->null_value()));
1757 __ DropAndRet(4);
1758
1759 // Process the result from the native regexp code.
1760 __ bind(&success);
1761
1762 __ lw(a1, UntagSmiFieldMemOperand(
1763 regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
1764 // Calculate number of capture registers (number_of_captures + 1) * 2.
1765 __ Daddu(a1, a1, Operand(1));
1766 __ dsll(a1, a1, 1); // Multiply by 2.
1767
1768 __ ld(a0, MemOperand(sp, kLastMatchInfoOffset));
1769 __ JumpIfSmi(a0, &runtime);
1770 __ GetObjectType(a0, a2, a2);
1771 __ Branch(&runtime, ne, a2, Operand(JS_ARRAY_TYPE));
1772 // Check that the JSArray is in fast case.
1773 __ ld(last_match_info_elements,
1774 FieldMemOperand(a0, JSArray::kElementsOffset));
1775 __ ld(a0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset));
1776 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
1777 __ Branch(&runtime, ne, a0, Operand(at));
1778 // Check that the last match info has space for the capture registers and the
1779 // additional information.
1780 __ ld(a0,
1781 FieldMemOperand(last_match_info_elements, FixedArray::kLengthOffset));
1782 __ Daddu(a2, a1, Operand(RegExpImpl::kLastMatchOverhead));
1783
1784 __ SmiUntag(at, a0);
1785 __ Branch(&runtime, gt, a2, Operand(at));
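  // The elements array must provide kLastMatchOverhead header slots plus one
  // slot per capture register (a1 already counts registers, two per capture).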
1786
1787 // a1: number of capture registers
1788 // subject: subject string
1789 // Store the capture count.
1790 __ SmiTag(a2, a1); // To smi.
1791 __ sd(a2, FieldMemOperand(last_match_info_elements,
1792 RegExpImpl::kLastCaptureCountOffset));
1793 // Store last subject and last input.
1794 __ sd(subject,
1795 FieldMemOperand(last_match_info_elements,
1796 RegExpImpl::kLastSubjectOffset));
1797 __ mov(a2, subject);
1798 __ RecordWriteField(last_match_info_elements,
1799 RegExpImpl::kLastSubjectOffset,
1800 subject,
1801 a7,
1802 kRAHasNotBeenSaved,
1803 kDontSaveFPRegs);
1804 __ mov(subject, a2);
1805 __ sd(subject,
1806 FieldMemOperand(last_match_info_elements,
1807 RegExpImpl::kLastInputOffset));
1808 __ RecordWriteField(last_match_info_elements,
1809 RegExpImpl::kLastInputOffset,
1810 subject,
1811 a7,
1812 kRAHasNotBeenSaved,
1813 kDontSaveFPRegs);
1814
1815 // Get the static offsets vector filled by the native regexp code.
1816 ExternalReference address_of_static_offsets_vector =
1817 ExternalReference::address_of_static_offsets_vector(isolate());
1818 __ li(a2, Operand(address_of_static_offsets_vector));
1819
1820 // a1: number of capture registers
1821 // a2: offsets vector
1822 Label next_capture, done;
1823 // Capture register counter starts from number of capture registers and
1824 // counts down until wrapping after zero.
1825 __ Daddu(a0,
1826 last_match_info_elements,
1827 Operand(RegExpImpl::kFirstCaptureOffset - kHeapObjectTag));
1828 __ bind(&next_capture);
1829 __ Dsubu(a1, a1, Operand(1));
1830 __ Branch(&done, lt, a1, Operand(zero_reg));
1831 // Read the value from the static offsets vector buffer.
1832 __ lw(a3, MemOperand(a2, 0));
1833 __ daddiu(a2, a2, kIntSize);
1834 // Store the smi value in the last match info.
1835 __ SmiTag(a3);
1836 __ sd(a3, MemOperand(a0, 0));
1837 __ Branch(&next_capture, USE_DELAY_SLOT);
1838 __ daddiu(a0, a0, kPointerSize); // In branch delay slot.
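  // The counter in a1 runs down to zero while a2 advances through the raw
  // int32 offsets and a0 through the tagged slots of the last match info.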
1839
1840 __ bind(&done);
1841
1842 // Return last match info.
1843 __ ld(v0, MemOperand(sp, kLastMatchInfoOffset));
1844 __ DropAndRet(4);
1845
1846 // Do the runtime call to execute the regexp.
1847 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001848 __ TailCallRuntime(Runtime::kRegExpExec);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001849
1850 // Deferred code for string handling.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001851 // (5) Long external string? If not, go to (7).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001852 __ bind(&not_seq_nor_cons);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001853 // Go to (7).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001854 __ Branch(&not_long_external, gt, a1, Operand(kExternalStringTag));
1855
Ben Murdoch097c5b22016-05-18 11:27:45 +01001856 // (6) External string. Make it, offset-wise, look like a sequential string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001857 __ bind(&external_string);
1858 __ ld(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
1859 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
1860 if (FLAG_debug_code) {
1861 // Assert that we do not have a cons or slice (indirect strings) here.
1862 // Sequential strings have already been ruled out.
1863 __ And(at, a0, Operand(kIsIndirectStringMask));
1864 __ Assert(eq,
1865 kExternalStringExpectedButNotFound,
1866 at,
1867 Operand(zero_reg));
1868 }
1869 __ ld(subject,
1870 FieldMemOperand(subject, ExternalString::kResourceDataOffset));
1871 // Move the pointer so that offset-wise, it looks like a sequential string.
1872 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
1873 __ Dsubu(subject,
1874 subject,
1875 SeqTwoByteString::kHeaderSize - kHeapObjectTag);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001876 __ jmp(&seq_string); // Go to (4).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001877
Ben Murdoch097c5b22016-05-18 11:27:45 +01001878 // (7) Short external string or not a string? If yes, bail out to runtime.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001879 __ bind(&not_long_external);
1880 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
1881 __ And(at, a1, Operand(kIsNotStringMask | kShortExternalStringMask));
1882 __ Branch(&runtime, ne, at, Operand(zero_reg));
1883
Ben Murdoch097c5b22016-05-18 11:27:45 +01001884 // (8) Sliced string. Replace subject with parent. Go to (4).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001885 // Load offset into t0 and replace subject string with parent.
1886 __ ld(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset));
1887 __ SmiUntag(t0);
1888 __ ld(subject, FieldMemOperand(subject, SlicedString::kParentOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001889 __ jmp(&check_underlying); // Go to (1).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001890#endif // V8_INTERPRETED_REGEXP
1891}
1892
1893
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001894static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
1895 // a0 : number of arguments to the construct function
1896 // a2 : feedback vector
1897 // a3 : slot in feedback vector (Smi)
1898 // a1 : the function to call
1899 FrameScope scope(masm, StackFrame::INTERNAL);
1900 const RegList kSavedRegs = 1 << 4 | // a0
1901 1 << 5 | // a1
1902 1 << 6 | // a2
1903 1 << 7; // a3
1904
1905
1906 // Number-of-arguments register must be smi-tagged to call out.
1907 __ SmiTag(a0);
1908 __ MultiPush(kSavedRegs);
1909
1910 __ CallStub(stub);
1911
1912 __ MultiPop(kSavedRegs);
1913 __ SmiUntag(a0);
1914}
1915
1916
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001917static void GenerateRecordCallTarget(MacroAssembler* masm) {
1918 // Cache the called function in a feedback vector slot. Cache states
1919 // are uninitialized, monomorphic (indicated by a JSFunction), and
1920 // megamorphic.
1921 // a0 : number of arguments to the construct function
1922 // a1 : the function to call
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001923 // a2 : feedback vector
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001924 // a3 : slot in feedback vector (Smi)
1925 Label initialize, done, miss, megamorphic, not_array_function;
1926
1927 DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
1928 masm->isolate()->heap()->megamorphic_symbol());
1929 DCHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(masm->isolate()),
1930 masm->isolate()->heap()->uninitialized_symbol());
1931
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001932 // Load the cache state into a5.
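  // The slot index in a3 is a smi whose 32-bit payload sits in the upper word,
  // so shifting right by 32 - kPointerSizeLog2 yields the index scaled by
  // kPointerSize, i.e. a byte offset into the feedback vector.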
1933 __ dsrl(a5, a3, 32 - kPointerSizeLog2);
1934 __ Daddu(a5, a2, Operand(a5));
1935 __ ld(a5, FieldMemOperand(a5, FixedArray::kHeaderSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001936
1937 // A monomorphic cache hit or an already megamorphic state: invoke the
1938 // function without changing the state.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001939 // We don't know if a5 is a WeakCell or a Symbol, but it's harmless to read at
1940 // this position in a symbol (see static asserts in type-feedback-vector.h).
1941 Label check_allocation_site;
1942 Register feedback_map = a6;
1943 Register weak_value = t0;
1944 __ ld(weak_value, FieldMemOperand(a5, WeakCell::kValueOffset));
1945 __ Branch(&done, eq, a1, Operand(weak_value));
1946 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
1947 __ Branch(&done, eq, a5, Operand(at));
1948 __ ld(feedback_map, FieldMemOperand(a5, HeapObject::kMapOffset));
1949 __ LoadRoot(at, Heap::kWeakCellMapRootIndex);
1950 __ Branch(&check_allocation_site, ne, feedback_map, Operand(at));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001951
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001952 // If the weak cell is cleared, we have a new chance to become monomorphic.
1953 __ JumpIfSmi(weak_value, &initialize);
1954 __ jmp(&megamorphic);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001955
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001956 __ bind(&check_allocation_site);
1957 // If we came here, we need to see if we are the array function.
1958 // If we didn't have a matching function, and we didn't find the megamorphic
1959 // sentinel, then the slot holds either some other function or an
1960 // AllocationSite.
1961 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
1962 __ Branch(&miss, ne, feedback_map, Operand(at));
1963
1964 // Make sure the function is the Array() function
1965 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a5);
1966 __ Branch(&megamorphic, ne, a1, Operand(a5));
1967 __ jmp(&done);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001968
1969 __ bind(&miss);
1970
1971 // A monomorphic miss (i.e, here the cache is not uninitialized) goes
1972 // megamorphic.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001973 __ LoadRoot(at, Heap::kuninitialized_symbolRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001974 __ Branch(&initialize, eq, a5, Operand(at));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001975 // MegamorphicSentinel is an immortal immovable object (undefined) so no
1976 // write-barrier is needed.
1977 __ bind(&megamorphic);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001978 __ dsrl(a5, a3, 32 - kPointerSizeLog2);
1979 __ Daddu(a5, a2, Operand(a5));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001980 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001981 __ sd(at, FieldMemOperand(a5, FixedArray::kHeaderSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001982 __ jmp(&done);
1983
1984 // An uninitialized cache is patched with the function.
1985 __ bind(&initialize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001986 // Make sure the function is the Array() function.
1987 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a5);
1988 __ Branch(&not_array_function, ne, a1, Operand(a5));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001989
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001990 // The target function is the Array constructor,
1991 // Create an AllocationSite if we don't already have it, store it in the
1992 // slot.
1993 CreateAllocationSiteStub create_stub(masm->isolate());
1994 CallStubInRecordCallTarget(masm, &create_stub);
1995 __ Branch(&done);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001996
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001997 __ bind(&not_array_function);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001998
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001999 CreateWeakCellStub weak_cell_stub(masm->isolate());
2000 CallStubInRecordCallTarget(masm, &weak_cell_stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002001 __ bind(&done);
2002}
2003
2004
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002005void CallConstructStub::Generate(MacroAssembler* masm) {
2006 // a0 : number of arguments
2007 // a1 : the function to call
2008 // a2 : feedback vector
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002009 // a3 : slot in feedback vector (Smi, for RecordCallTarget)
2010
2011 Label non_function;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002012 // Check that the function is not a smi.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002013 __ JumpIfSmi(a1, &non_function);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002014 // Check that the function is a JSFunction.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002015 __ GetObjectType(a1, a5, a5);
2016 __ Branch(&non_function, ne, a5, Operand(JS_FUNCTION_TYPE));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002017
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002018 GenerateRecordCallTarget(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002019
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002020 __ dsrl(at, a3, 32 - kPointerSizeLog2);
2021 __ Daddu(a5, a2, at);
2022 Label feedback_register_initialized;
2023 // Put the AllocationSite from the feedback vector into a2, or undefined.
2024 __ ld(a2, FieldMemOperand(a5, FixedArray::kHeaderSize));
2025 __ ld(a5, FieldMemOperand(a2, AllocationSite::kMapOffset));
2026 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
2027 __ Branch(&feedback_register_initialized, eq, a5, Operand(at));
2028 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2029 __ bind(&feedback_register_initialized);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002030
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002031 __ AssertUndefinedOrAllocationSite(a2, a5);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002032
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002033 // Pass function as new target.
2034 __ mov(a3, a1);
2035
2036 // Tail call to the function-specific construct stub (still in the caller
2037 // context at this point).
2038 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2039 __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset));
2040 __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002041 __ Jump(at);
2042
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002043 __ bind(&non_function);
2044 __ mov(a3, a1);
2045 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002046}
2047
2048
2049// StringCharCodeAtGenerator.
2050void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
2051 DCHECK(!a4.is(index_));
2052 DCHECK(!a4.is(result_));
2053 DCHECK(!a4.is(object_));
2054
2055 // If the receiver is a smi trigger the non-string case.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002056 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
2057 __ JumpIfSmi(object_, receiver_not_string_);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002058
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002059 // Fetch the instance type of the receiver into result register.
2060 __ ld(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
2061 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
2062 // If the receiver is not a string trigger the non-string case.
2063 __ And(a4, result_, Operand(kIsNotStringMask));
2064 __ Branch(receiver_not_string_, ne, a4, Operand(zero_reg));
2065 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002066
2067 // If the index is non-smi trigger the non-smi case.
2068 __ JumpIfNotSmi(index_, &index_not_smi_);
2069
2070 __ bind(&got_smi_index_);
2071
2072 // Check for index out of range.
2073 __ ld(a4, FieldMemOperand(object_, String::kLengthOffset));
2074 __ Branch(index_out_of_range_, ls, a4, Operand(index_));
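  // Both the length in a4 and index_ are smis, so an unsigned comparison of
  // the tagged values orders them correctly and also treats negative indices
  // (huge as unsigned values) as out of range.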
2075
2076 __ SmiUntag(index_);
2077
2078 StringCharLoadGenerator::Generate(masm,
2079 object_,
2080 index_,
2081 result_,
2082 &call_runtime_);
2083
2084 __ SmiTag(result_);
2085 __ bind(&exit_);
2086}
2087
2088
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002089void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002090 // a1 - function
2091 // a3 - slot id
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002092 // a2 - vector
2093 // a4 - allocation site (loaded from vector[slot])
2094 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, at);
2095 __ Branch(miss, ne, a1, Operand(at));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002096
2097 __ li(a0, Operand(arg_count()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002098
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002099 // Increment the call count for monomorphic function calls.
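  // The call count is kept as a smi in the slot just after the feedback entry,
  // hence the FixedArray::kHeaderSize + kPointerSize offset used below.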
2100 __ dsrl(t0, a3, 32 - kPointerSizeLog2);
2101 __ Daddu(a3, a2, Operand(t0));
2102 __ ld(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
2103 __ Daddu(t0, t0, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
2104 __ sd(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002105
2106 __ mov(a2, a4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002107 __ mov(a3, a1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002108 ArrayConstructorStub stub(masm->isolate(), arg_count());
2109 __ TailCallStub(&stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002110}
2111
2112
2113void CallICStub::Generate(MacroAssembler* masm) {
2114 // a1 - function
2115 // a3 - slot id (Smi)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002116 // a2 - vector
2117 Label extra_checks_or_miss, call, call_function;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002118 int argc = arg_count();
2119 ParameterCount actual(argc);
2120
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002121 // The checks. First, does a1 match the recorded monomorphic target?
2122 __ dsrl(a4, a3, 32 - kPointerSizeLog2);
2123 __ Daddu(a4, a2, Operand(a4));
2124 __ ld(a4, FieldMemOperand(a4, FixedArray::kHeaderSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002125
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002126 // We don't know that we have a weak cell. We might have a private symbol
2127 // or an AllocationSite, but the memory is safe to examine.
2128 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
2129 // FixedArray.
2130 // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
2131 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
2132 // computed, meaning that it can't appear to be a pointer. If the low bit is
2133 // 0, then hash is computed, but the 0 bit prevents the field from appearing
2134 // to be a pointer.
2135 STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
2136 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
2137 WeakCell::kValueOffset &&
2138 WeakCell::kValueOffset == Symbol::kHashFieldSlot);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002139
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002140 __ ld(a5, FieldMemOperand(a4, WeakCell::kValueOffset));
2141 __ Branch(&extra_checks_or_miss, ne, a1, Operand(a5));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002142
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002143 // The compare above could have been a SMI/SMI comparison. Guard against this
2144 // convincing us that we have a monomorphic JSFunction.
2145 __ JumpIfSmi(a1, &extra_checks_or_miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002146
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002147 // Increment the call count for monomorphic function calls.
2148 __ dsrl(t0, a3, 32 - kPointerSizeLog2);
2149 __ Daddu(a3, a2, Operand(t0));
2150 __ ld(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
2151 __ Daddu(t0, t0, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
2152 __ sd(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002153
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002154 __ bind(&call_function);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002155 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
2156 tail_call_mode()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002157 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
2158 USE_DELAY_SLOT);
2159 __ li(a0, Operand(argc)); // In delay slot.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002160
2161 __ bind(&extra_checks_or_miss);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002162 Label uninitialized, miss, not_allocation_site;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002163
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002164 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002165 __ Branch(&call, eq, a4, Operand(at));
2166
2167 // Verify that a4 contains an AllocationSite
2168 __ ld(a5, FieldMemOperand(a4, HeapObject::kMapOffset));
2169 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
2170 __ Branch(&not_allocation_site, ne, a5, Operand(at));
2171
2172 HandleArrayCase(masm, &miss);
2173
2174 __ bind(&not_allocation_site);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002175
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002176 // The following code attempts to handle MISS cases without going to the
2177 // runtime.
2178 if (FLAG_trace_ic) {
2179 __ Branch(&miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002180 }
2181
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002182 __ LoadRoot(at, Heap::kuninitialized_symbolRootIndex);
2183 __ Branch(&uninitialized, eq, a4, Operand(at));
2184
2185 // We are going megamorphic. If the feedback is a JSFunction, it is fine
2186 // to handle it here. More complex cases are dealt with in the runtime.
2187 __ AssertNotSmi(a4);
2188 __ GetObjectType(a4, a5, a5);
2189 __ Branch(&miss, ne, a5, Operand(JS_FUNCTION_TYPE));
2190 __ dsrl(a4, a3, 32 - kPointerSizeLog2);
2191 __ Daddu(a4, a2, Operand(a4));
2192 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
2193 __ sd(at, FieldMemOperand(a4, FixedArray::kHeaderSize));
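  // From here on the call site is megamorphic: the slot holds the sentinel
  // symbol and the generic Call builtin below is used.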
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002194
2195 __ bind(&call);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002196 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002197 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
2198 USE_DELAY_SLOT);
2199 __ li(a0, Operand(argc)); // In delay slot.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002200
2201 __ bind(&uninitialized);
2202
2203 // We are going monomorphic, provided we actually have a JSFunction.
2204 __ JumpIfSmi(a1, &miss);
2205
2206 // Go to the miss case if we do not have a function.
2207 __ GetObjectType(a1, a4, a4);
2208 __ Branch(&miss, ne, a4, Operand(JS_FUNCTION_TYPE));
2209
2210 // Make sure the function is not the Array() function, which requires special
2211 // behavior on MISS.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002212 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a4);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002213 __ Branch(&miss, eq, a1, Operand(a4));
2214
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002215 // Make sure the function belongs to the same native context.
2216 __ ld(t0, FieldMemOperand(a1, JSFunction::kContextOffset));
2217 __ ld(t0, ContextMemOperand(t0, Context::NATIVE_CONTEXT_INDEX));
2218 __ ld(t1, NativeContextMemOperand());
2219 __ Branch(&miss, ne, t0, Operand(t1));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002220
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002221 // Initialize the call counter.
2222 __ dsrl(at, a3, 32 - kPointerSizeLog2);
2223 __ Daddu(at, a2, Operand(at));
2224 __ li(t0, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
2225 __ sd(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002226
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002227 // Store the function. Use a stub since we need a frame for allocation.
2228 // a2 - vector
2229 // a3 - slot
2230 // a1 - function
2231 {
2232 FrameScope scope(masm, StackFrame::INTERNAL);
2233 CreateWeakCellStub create_stub(masm->isolate());
2234 __ Push(a1);
2235 __ CallStub(&create_stub);
2236 __ Pop(a1);
2237 }
2238
2239 __ Branch(&call_function);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002240
2241 // We are here because tracing is on or we encountered a MISS case we can't
2242 // handle here.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002243 __ bind(&miss);
2244 GenerateMiss(masm);
2245
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002246 __ Branch(&call);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002247}
2248
2249
2250void CallICStub::GenerateMiss(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002251 FrameScope scope(masm, StackFrame::INTERNAL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002252
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002253 // Push the function and the feedback info (vector and slot).
2254 __ Push(a1, a2, a3);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002255
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002256 // Call the entry.
2257 __ CallRuntime(Runtime::kCallIC_Miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002258
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002259 // Move result to a1 and exit the internal frame.
2260 __ mov(a1, v0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002261}
2262
2263
2264void StringCharCodeAtGenerator::GenerateSlow(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002265 MacroAssembler* masm, EmbedMode embed_mode,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002266 const RuntimeCallHelper& call_helper) {
2267 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
2268
2269 // Index is not a smi.
2270 __ bind(&index_not_smi_);
2271 // If index is a heap number, try converting it to an integer.
2272 __ CheckMap(index_,
2273 result_,
2274 Heap::kHeapNumberMapRootIndex,
2275 index_not_number_,
2276 DONT_DO_SMI_CHECK);
2277 call_helper.BeforeCall(masm);
2278 // Consumed by runtime conversion function:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002279 if (embed_mode == PART_OF_IC_HANDLER) {
2280 __ Push(LoadWithVectorDescriptor::VectorRegister(),
2281 LoadWithVectorDescriptor::SlotRegister(), object_, index_);
2282 } else {
2283 __ Push(object_, index_);
2284 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002285 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002286 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002287 } else {
2288 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
2289 // NumberToSmi discards numbers that are not exact integers.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002290 __ CallRuntime(Runtime::kNumberToSmi);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002291 }
2292
2293 // Save the conversion result before the pop instructions below
2294 // have a chance to overwrite it.
2295
2296 __ Move(index_, v0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002297 if (embed_mode == PART_OF_IC_HANDLER) {
2298 __ Pop(LoadWithVectorDescriptor::VectorRegister(),
2299 LoadWithVectorDescriptor::SlotRegister(), object_);
2300 } else {
2301 __ pop(object_);
2302 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002303 // Reload the instance type.
2304 __ ld(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
2305 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
2306 call_helper.AfterCall(masm);
2307 // If index is still not a smi, it must be out of range.
2308 __ JumpIfNotSmi(index_, index_out_of_range_);
2309 // Otherwise, return to the fast path.
2310 __ Branch(&got_smi_index_);
2311
2312 // Call runtime. We get here when the receiver is a string and the
2313 // index is a number, but the code for getting the actual character
2314 // is too complex (e.g., when the string needs to be flattened).
2315 __ bind(&call_runtime_);
2316 call_helper.BeforeCall(masm);
2317 __ SmiTag(index_);
2318 __ Push(object_, index_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002319 __ CallRuntime(Runtime::kStringCharCodeAtRT);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002320
2321 __ Move(result_, v0);
2322
2323 call_helper.AfterCall(masm);
2324 __ jmp(&exit_);
2325
2326 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
2327}
2328
2329
2330// -------------------------------------------------------------------------
2331// StringCharFromCodeGenerator
2332
2333void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
2334 // Fast case of Heap::LookupSingleCharacterStringFromCode.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002335 __ JumpIfNotSmi(code_, &slow_case_);
2336 __ Branch(&slow_case_, hi, code_,
2337 Operand(Smi::FromInt(String::kMaxOneByteCharCode)));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002338
2339 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
2340 // At this point the code register contains a smi-tagged one-byte char code.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002341 __ SmiScale(at, code_, kPointerSizeLog2);
2342 __ Daddu(result_, result_, at);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002343 __ ld(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002344 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2345 __ Branch(&slow_case_, eq, result_, Operand(at));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002346 __ bind(&exit_);
2347}
2348
2349
2350void StringCharFromCodeGenerator::GenerateSlow(
2351 MacroAssembler* masm,
2352 const RuntimeCallHelper& call_helper) {
2353 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
2354
2355 __ bind(&slow_case_);
2356 call_helper.BeforeCall(masm);
2357 __ push(code_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002358 __ CallRuntime(Runtime::kStringCharFromCode);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002359 __ Move(result_, v0);
2360
2361 call_helper.AfterCall(masm);
2362 __ Branch(&exit_);
2363
2364 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
2365}
2366
2367
2368enum CopyCharactersFlags { COPY_ONE_BYTE = 1, DEST_ALWAYS_ALIGNED = 2 };
2369
2370
2371void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
2372 Register dest,
2373 Register src,
2374 Register count,
2375 Register scratch,
2376 String::Encoding encoding) {
2377 if (FLAG_debug_code) {
2378 // Check that destination is word aligned.
2379 __ And(scratch, dest, Operand(kPointerAlignmentMask));
2380 __ Check(eq,
2381 kDestinationOfCopyNotAligned,
2382 scratch,
2383 Operand(zero_reg));
2384 }
2385
2386 // Assumes word reads and writes are little endian.
2387 // Nothing to do for zero characters.
2388 Label done;
2389
2390 if (encoding == String::TWO_BYTE_ENCODING) {
2391 __ Daddu(count, count, count);
2392 }
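  // count is now a byte count; the loop below copies one byte per iteration,
  // which works for both encodings because two-byte characters are simply
  // copied as two consecutive bytes.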
2393
2394 Register limit = count; // Read until dest equals this.
2395 __ Daddu(limit, dest, Operand(count));
2396
2397 Label loop_entry, loop;
2398 // Copy bytes from src to dest until dest hits limit.
2399 __ Branch(&loop_entry);
2400 __ bind(&loop);
2401 __ lbu(scratch, MemOperand(src));
2402 __ daddiu(src, src, 1);
2403 __ sb(scratch, MemOperand(dest));
2404 __ daddiu(dest, dest, 1);
2405 __ bind(&loop_entry);
2406 __ Branch(&loop, lt, dest, Operand(limit));
2407
2408 __ bind(&done);
2409}
2410
2411
2412void SubStringStub::Generate(MacroAssembler* masm) {
2413 Label runtime;
2414 // Stack frame on entry.
2415 // ra: return address
2416 // sp[0]: to
2417 // sp[8]: from
2418 // sp[16]: string
2419
2420 // This stub is called from the native-call %_SubString(...), so
2421 // nothing can be assumed about the arguments. It is tested that:
2422 // "string" is a sequential string,
2423 // both "from" and "to" are smis, and
2424 // 0 <= from <= to <= string.length.
2425 // If any of these assumptions fail, we call the runtime system.
2426
2427 const int kToOffset = 0 * kPointerSize;
2428 const int kFromOffset = 1 * kPointerSize;
2429 const int kStringOffset = 2 * kPointerSize;
2430
2431 __ ld(a2, MemOperand(sp, kToOffset));
2432 __ ld(a3, MemOperand(sp, kFromOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002433
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002434 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002435
2436 // Utilize delay slots. SmiUntag doesn't emit a jump, everything else is
2437 // safe in this case.
2438 __ JumpIfNotSmi(a2, &runtime);
2439 __ JumpIfNotSmi(a3, &runtime);
2440 // Both a2 and a3 are untagged integers.
2441
2442 __ SmiUntag(a2, a2);
2443 __ SmiUntag(a3, a3);
2444 __ Branch(&runtime, lt, a3, Operand(zero_reg)); // From < 0.
2445
2446 __ Branch(&runtime, gt, a3, Operand(a2)); // Fail if from > to.
2447 __ Dsubu(a2, a2, a3);
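  // a2 now holds the requested substring length (to - from); a3 still holds
  // the untagged start index.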
2448
2449 // Make sure first argument is a string.
2450 __ ld(v0, MemOperand(sp, kStringOffset));
2451 __ JumpIfSmi(v0, &runtime);
2452 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2453 __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
2454 __ And(a4, a1, Operand(kIsNotStringMask));
2455
2456 __ Branch(&runtime, ne, a4, Operand(zero_reg));
2457
2458 Label single_char;
2459 __ Branch(&single_char, eq, a2, Operand(1));
2460
2461 // Short-cut for the case of trivial substring.
2462 Label return_v0;
2463 // v0: original string
2464 // a2: result string length
2465 __ ld(a4, FieldMemOperand(v0, String::kLengthOffset));
2466 __ SmiUntag(a4);
2467 // Return original string.
2468 __ Branch(&return_v0, eq, a2, Operand(a4));
2469 // Longer than original string's length or negative: unsafe arguments.
2470 __ Branch(&runtime, hi, a2, Operand(a4));
2471 // Shorter than original string's length: an actual substring.
2472
2473 // Deal with different string types: update the index if necessary
2474 // and put the underlying string into a5.
2475 // v0: original string
2476 // a1: instance type
2477 // a2: length
2478 // a3: from index (untagged)
2479 Label underlying_unpacked, sliced_string, seq_or_external_string;
2480 // If the string is not indirect, it can only be sequential or external.
2481 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
2482 STATIC_ASSERT(kIsIndirectStringMask != 0);
2483 __ And(a4, a1, Operand(kIsIndirectStringMask));
2484 __ Branch(USE_DELAY_SLOT, &seq_or_external_string, eq, a4, Operand(zero_reg));
2485 // a4 is used as a scratch register and can be overwritten in either case.
2486 __ And(a4, a1, Operand(kSlicedNotConsMask));
2487 __ Branch(&sliced_string, ne, a4, Operand(zero_reg));
2488 // Cons string. Check whether it is flat, then fetch first part.
2489 __ ld(a5, FieldMemOperand(v0, ConsString::kSecondOffset));
2490 __ LoadRoot(a4, Heap::kempty_stringRootIndex);
2491 __ Branch(&runtime, ne, a5, Operand(a4));
2492 __ ld(a5, FieldMemOperand(v0, ConsString::kFirstOffset));
2493 // Update instance type.
2494 __ ld(a1, FieldMemOperand(a5, HeapObject::kMapOffset));
2495 __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
2496 __ jmp(&underlying_unpacked);
2497
2498 __ bind(&sliced_string);
2499 // Sliced string. Fetch parent and correct start index by offset.
2500 __ ld(a5, FieldMemOperand(v0, SlicedString::kParentOffset));
2501 __ ld(a4, FieldMemOperand(v0, SlicedString::kOffsetOffset));
2502 __ SmiUntag(a4); // Add offset to index.
2503 __ Daddu(a3, a3, a4);
2504 // Update instance type.
2505 __ ld(a1, FieldMemOperand(a5, HeapObject::kMapOffset));
2506 __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
2507 __ jmp(&underlying_unpacked);
2508
2509 __ bind(&seq_or_external_string);
2510 // Sequential or external string. Just move string to the expected register.
2511 __ mov(a5, v0);
2512
2513 __ bind(&underlying_unpacked);
2514
2515 if (FLAG_string_slices) {
2516 Label copy_routine;
2517 // a5: underlying subject string
2518 // a1: instance type of underlying subject string
2519 // a2: length
2520 // a3: adjusted start index (untagged)
2521 // Short slice. Copy instead of slicing.
2522 __ Branch(&copy_routine, lt, a2, Operand(SlicedString::kMinLength));
2523 // Allocate new sliced string. At this point we do not reload the instance
2524 // type including the string encoding because we simply rely on the info
2525 // provided by the original string. It does not matter if the original
2526 // string's encoding is wrong because we always have to recheck the encoding
2527 // of the newly created string's parent anyway due to externalized strings.
2528 Label two_byte_slice, set_slice_header;
2529 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
2530 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
2531 __ And(a4, a1, Operand(kStringEncodingMask));
2532 __ Branch(&two_byte_slice, eq, a4, Operand(zero_reg));
2533 __ AllocateOneByteSlicedString(v0, a2, a6, a7, &runtime);
2534 __ jmp(&set_slice_header);
2535 __ bind(&two_byte_slice);
2536 __ AllocateTwoByteSlicedString(v0, a2, a6, a7, &runtime);
2537 __ bind(&set_slice_header);
2538 __ SmiTag(a3);
2539 __ sd(a5, FieldMemOperand(v0, SlicedString::kParentOffset));
2540 __ sd(a3, FieldMemOperand(v0, SlicedString::kOffsetOffset));
2541 __ jmp(&return_v0);
2542
2543 __ bind(&copy_routine);
2544 }
2545
2546 // a5: underlying subject string
2547 // a1: instance type of underlying subject string
2548 // a2: length
2549 // a3: adjusted start index (untagged)
2550 Label two_byte_sequential, sequential_string, allocate_result;
2551 STATIC_ASSERT(kExternalStringTag != 0);
2552 STATIC_ASSERT(kSeqStringTag == 0);
2553 __ And(a4, a1, Operand(kExternalStringTag));
2554 __ Branch(&sequential_string, eq, a4, Operand(zero_reg));
2555
2556 // Handle external string.
2557 // Rule out short external strings.
2558 STATIC_ASSERT(kShortExternalStringTag != 0);
2559 __ And(a4, a1, Operand(kShortExternalStringTag));
2560 __ Branch(&runtime, ne, a4, Operand(zero_reg));
2561 __ ld(a5, FieldMemOperand(a5, ExternalString::kResourceDataOffset));
2562 // a5 already points to the first character of underlying string.
2563 __ jmp(&allocate_result);
2564
2565 __ bind(&sequential_string);
2566 // Locate first character of underlying subject string.
2567 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
2568 __ Daddu(a5, a5, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
2569
2570 __ bind(&allocate_result);
2571 // Sequential one-byte string. Allocate the result.
2572 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
2573 __ And(a4, a1, Operand(kStringEncodingMask));
2574 __ Branch(&two_byte_sequential, eq, a4, Operand(zero_reg));
2575
2576 // Allocate and copy the resulting one_byte string.
2577 __ AllocateOneByteString(v0, a2, a4, a6, a7, &runtime);
2578
2579 // Locate first character of substring to copy.
2580 __ Daddu(a5, a5, a3);
2581
2582 // Locate first character of result.
2583 __ Daddu(a1, v0, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
2584
2585 // v0: result string
2586 // a1: first character of result string
2587 // a2: result string length
2588 // a5: first character of substring to copy
2589 STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
2590 StringHelper::GenerateCopyCharacters(
2591 masm, a1, a5, a2, a3, String::ONE_BYTE_ENCODING);
2592 __ jmp(&return_v0);
2593
2594 // Allocate and copy the resulting two-byte string.
2595 __ bind(&two_byte_sequential);
2596 __ AllocateTwoByteString(v0, a2, a4, a6, a7, &runtime);
2597
2598 // Locate first character of substring to copy.
2599 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002600 __ Dlsa(a5, a5, a3, 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002601 // Locate first character of result.
2602 __ Daddu(a1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
2603
2604 // v0: result string.
2605 // a1: first character of result.
2606 // a2: result length.
2607 // a5: first character of substring to copy.
2608 STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
2609 StringHelper::GenerateCopyCharacters(
2610 masm, a1, a5, a2, a3, String::TWO_BYTE_ENCODING);
2611
2612 __ bind(&return_v0);
2613 Counters* counters = isolate()->counters();
2614 __ IncrementCounter(counters->sub_string_native(), 1, a3, a4);
2615 __ DropAndRet(3);
2616
2617 // Just jump to runtime to create the sub string.
2618 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002619 __ TailCallRuntime(Runtime::kSubString);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002620
2621 __ bind(&single_char);
2622 // v0: original string
2623 // a1: instance type
2624 // a2: length
2625 // a3: from index (untagged)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002626 __ SmiTag(a3);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002627 StringCharAtGenerator generator(v0, a3, a2, v0, &runtime, &runtime, &runtime,
2628 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002629 generator.GenerateFast(masm);
2630 __ DropAndRet(3);
2631 generator.SkipSlow(masm, &runtime);
2632}
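// A simplified C-style sketch of the sequential/external fast path above
// (illustrative only; helper names are approximate, and allocation, tagging
// and write barriers are handled by the macro-assembler helpers):
//
//   const Char* src = underlying_chars + start_index;   // a5 + a3 (scaled by char size)
//   String* result = AllocateString(length);            // bails out to the runtime on failure
//   CopyChars(result->chars(), src, length);            // GenerateCopyCharacters
//   return result;                                      // in v0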
2633
2634
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002635void ToNumberStub::Generate(MacroAssembler* masm) {
2636 // The ToNumber stub takes one argument in a0.
2637 Label not_smi;
2638 __ JumpIfNotSmi(a0, &not_smi);
2639 __ Ret(USE_DELAY_SLOT);
2640 __ mov(v0, a0);
2641 __ bind(&not_smi);
2642
2643 Label not_heap_number;
Ben Murdochda12d292016-06-02 14:46:10 +01002644 __ GetObjectType(a0, a1, a1);
2645 // a0: receiver
2646 // a1: receiver instance type
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002647 __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
2648 __ Ret(USE_DELAY_SLOT);
2649 __ mov(v0, a0);
2650 __ bind(&not_heap_number);
2651
Ben Murdochda12d292016-06-02 14:46:10 +01002652 NonNumberToNumberStub stub(masm->isolate());
2653 __ TailCallStub(&stub);
2654}
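// The dispatch above corresponds roughly to (illustrative only):
//
//   if (IsSmi(x)) return x;             // already a number
//   if (IsHeapNumber(x)) return x;      // already a number
//   return NonNumberToNumber(x);        // strings, oddballs, other objects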
2655
2656void NonNumberToNumberStub::Generate(MacroAssembler* masm) {
2657  // The NonNumberToNumber stub takes one argument in a0.
2658 __ AssertNotNumber(a0);
2659
2660 Label not_string;
2661 __ GetObjectType(a0, a1, a1);
2662 // a0: receiver
2663 // a1: receiver instance type
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002664 __ Branch(&not_string, hs, a1, Operand(FIRST_NONSTRING_TYPE));
Ben Murdochda12d292016-06-02 14:46:10 +01002665 StringToNumberStub stub(masm->isolate());
2666 __ TailCallStub(&stub);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002667 __ bind(&not_string);
2668
2669 Label not_oddball;
2670 __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
2671 __ Ret(USE_DELAY_SLOT);
Ben Murdochda12d292016-06-02 14:46:10 +01002672 __ ld(v0, FieldMemOperand(a0, Oddball::kToNumberOffset)); // In delay slot.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002673 __ bind(&not_oddball);
2674
Ben Murdochda12d292016-06-02 14:46:10 +01002675 __ Push(a0); // Push argument.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002676 __ TailCallRuntime(Runtime::kToNumber);
2677}
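// A rough sketch of the dispatch above (illustrative only; accessor names
// are approximate):
//
//   if (IsString(x)) return StringToNumber(x);
//   if (IsOddball(x)) return x->to_number;   // true/false/null/undefined
//   return %ToNumber(x);                     // runtime call for everything else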
2678
Ben Murdochda12d292016-06-02 14:46:10 +01002679void StringToNumberStub::Generate(MacroAssembler* masm) {
2680  // The StringToNumber stub takes one argument in a0.
2681 __ AssertString(a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002682
Ben Murdochda12d292016-06-02 14:46:10 +01002683 // Check if string has a cached array index.
2684 Label runtime;
2685 __ lwu(a2, FieldMemOperand(a0, String::kHashFieldOffset));
2686 __ And(at, a2, Operand(String::kContainsCachedArrayIndexMask));
2687 __ Branch(&runtime, ne, at, Operand(zero_reg));
2688 __ IndexFromHash(a2, v0);
2689 __ Ret();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002690
Ben Murdochda12d292016-06-02 14:46:10 +01002691 __ bind(&runtime);
2692 __ Push(a0); // Push argument.
2693 __ TailCallRuntime(Runtime::kStringToNumber);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002694}
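// The fast path above relies on the array-index cache in the string's hash
// field; a simplified sketch (field layout abbreviated, names approximate):
//
//   uint32_t field = str->hash_field();
//   if ((field & String::kContainsCachedArrayIndexMask) == 0)
//     return Smi::FromInt(IndexFromHash(field));   // cached index, no allocation
//   return %StringToNumber(str);                   // slow path in the runtime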
2695
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002696void ToStringStub::Generate(MacroAssembler* masm) {
2697  // The ToString stub takes one argument in a0.
2698 Label is_number;
2699 __ JumpIfSmi(a0, &is_number);
2700
2701 Label not_string;
2702 __ GetObjectType(a0, a1, a1);
2703 // a0: receiver
2704 // a1: receiver instance type
2705 __ Branch(&not_string, ge, a1, Operand(FIRST_NONSTRING_TYPE));
2706 __ Ret(USE_DELAY_SLOT);
2707 __ mov(v0, a0);
2708 __ bind(&not_string);
2709
2710 Label not_heap_number;
2711 __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
2712 __ bind(&is_number);
2713 NumberToStringStub stub(isolate());
2714 __ TailCallStub(&stub);
2715 __ bind(&not_heap_number);
2716
2717 Label not_oddball;
2718 __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
2719 __ Ret(USE_DELAY_SLOT);
2720 __ ld(v0, FieldMemOperand(a0, Oddball::kToStringOffset));
2721 __ bind(&not_oddball);
2722
2723 __ push(a0); // Push argument.
2724 __ TailCallRuntime(Runtime::kToString);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002725}
2726
2727
Ben Murdoch097c5b22016-05-18 11:27:45 +01002728void ToNameStub::Generate(MacroAssembler* masm) {
2729  // The ToName stub takes one argument in a0.
2730 Label is_number;
2731 __ JumpIfSmi(a0, &is_number);
2732
2733 Label not_name;
2734 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
2735 __ GetObjectType(a0, a1, a1);
2736 // a0: receiver
2737 // a1: receiver instance type
2738 __ Branch(&not_name, gt, a1, Operand(LAST_NAME_TYPE));
2739 __ Ret(USE_DELAY_SLOT);
2740 __ mov(v0, a0);
2741 __ bind(&not_name);
2742
2743 Label not_heap_number;
2744 __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
2745 __ bind(&is_number);
2746 NumberToStringStub stub(isolate());
2747 __ TailCallStub(&stub);
2748 __ bind(&not_heap_number);
2749
2750 Label not_oddball;
2751 __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
2752 __ Ret(USE_DELAY_SLOT);
2753 __ ld(v0, FieldMemOperand(a0, Oddball::kToStringOffset));
2754 __ bind(&not_oddball);
2755
2756 __ push(a0); // Push argument.
2757 __ TailCallRuntime(Runtime::kToName);
2758}
2759
2760
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002761void StringHelper::GenerateFlatOneByteStringEquals(
2762 MacroAssembler* masm, Register left, Register right, Register scratch1,
2763 Register scratch2, Register scratch3) {
2764 Register length = scratch1;
2765
2766 // Compare lengths.
2767 Label strings_not_equal, check_zero_length;
2768 __ ld(length, FieldMemOperand(left, String::kLengthOffset));
2769 __ ld(scratch2, FieldMemOperand(right, String::kLengthOffset));
2770 __ Branch(&check_zero_length, eq, length, Operand(scratch2));
2771 __ bind(&strings_not_equal);
2772  // Cannot put li in the delay slot; it expands to multiple instructions.
2773 __ li(v0, Operand(Smi::FromInt(NOT_EQUAL)));
2774 __ Ret();
2775
2776 // Check if the length is zero.
2777 Label compare_chars;
2778 __ bind(&check_zero_length);
2779 STATIC_ASSERT(kSmiTag == 0);
2780 __ Branch(&compare_chars, ne, length, Operand(zero_reg));
2781 DCHECK(is_int16((intptr_t)Smi::FromInt(EQUAL)));
2782 __ Ret(USE_DELAY_SLOT);
2783 __ li(v0, Operand(Smi::FromInt(EQUAL)));
2784
2785 // Compare characters.
2786 __ bind(&compare_chars);
2787
2788 GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2, scratch3,
2789 v0, &strings_not_equal);
2790
2791 // Characters are equal.
2792 __ Ret(USE_DELAY_SLOT);
2793 __ li(v0, Operand(Smi::FromInt(EQUAL)));
2794}
2795
2796
2797void StringHelper::GenerateCompareFlatOneByteStrings(
2798 MacroAssembler* masm, Register left, Register right, Register scratch1,
2799 Register scratch2, Register scratch3, Register scratch4) {
2800 Label result_not_equal, compare_lengths;
2801 // Find minimum length and length difference.
2802 __ ld(scratch1, FieldMemOperand(left, String::kLengthOffset));
2803 __ ld(scratch2, FieldMemOperand(right, String::kLengthOffset));
2804 __ Dsubu(scratch3, scratch1, Operand(scratch2));
2805 Register length_delta = scratch3;
2806 __ slt(scratch4, scratch2, scratch1);
2807 __ Movn(scratch1, scratch2, scratch4);
2808 Register min_length = scratch1;
2809 STATIC_ASSERT(kSmiTag == 0);
2810 __ Branch(&compare_lengths, eq, min_length, Operand(zero_reg));
2811
2812 // Compare loop.
2813 GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
2814 scratch4, v0, &result_not_equal);
2815
2816 // Compare lengths - strings up to min-length are equal.
2817 __ bind(&compare_lengths);
2818 DCHECK(Smi::FromInt(EQUAL) == static_cast<Smi*>(0));
2819 // Use length_delta as result if it's zero.
2820 __ mov(scratch2, length_delta);
2821 __ mov(scratch4, zero_reg);
2822 __ mov(v0, zero_reg);
2823
2824 __ bind(&result_not_equal);
2825 // Conditionally update the result based either on length_delta or
2826  // the last comparison performed in the loop above.
2827 Label ret;
2828 __ Branch(&ret, eq, scratch2, Operand(scratch4));
2829 __ li(v0, Operand(Smi::FromInt(GREATER)));
2830 __ Branch(&ret, gt, scratch2, Operand(scratch4));
2831 __ li(v0, Operand(Smi::FromInt(LESS)));
2832 __ bind(&ret);
2833 __ Ret();
2834}
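// Conceptually the comparison above is (illustrative only):
//
//   int min_length = Min(left->length(), right->length());
//   for (int i = 0; i < min_length; i++) {
//     if (left[i] != right[i]) return left[i] < right[i] ? LESS : GREATER;
//   }
//   int delta = left->length() - right->length();
//   return delta < 0 ? LESS : (delta > 0 ? GREATER : EQUAL);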
2835
2836
2837void StringHelper::GenerateOneByteCharsCompareLoop(
2838 MacroAssembler* masm, Register left, Register right, Register length,
2839 Register scratch1, Register scratch2, Register scratch3,
2840 Label* chars_not_equal) {
2841 // Change index to run from -length to -1 by adding length to string
2842 // start. This means that loop ends when index reaches zero, which
2843 // doesn't need an additional compare.
2844 __ SmiUntag(length);
2845 __ Daddu(scratch1, length,
2846 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
2847 __ Daddu(left, left, Operand(scratch1));
2848 __ Daddu(right, right, Operand(scratch1));
2849 __ Dsubu(length, zero_reg, length);
2850 Register index = length; // index = -length;
2851
2852
2853 // Compare loop.
2854 Label loop;
2855 __ bind(&loop);
2856 __ Daddu(scratch3, left, index);
2857 __ lbu(scratch1, MemOperand(scratch3));
2858 __ Daddu(scratch3, right, index);
2859 __ lbu(scratch2, MemOperand(scratch3));
2860 __ Branch(chars_not_equal, ne, scratch1, Operand(scratch2));
2861 __ Daddu(index, index, 1);
2862 __ Branch(&loop, ne, index, Operand(zero_reg));
2863}
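// The loop above biases both string pointers by |length| and runs the index
// from -length up to zero, so the loop branch doubles as the end-of-string
// test; roughly (illustrative only):
//
//   const uint8_t* lp = left_chars + length;
//   const uint8_t* rp = right_chars + length;
//   for (intptr_t i = -length; i != 0; i++) {
//     if (lp[i] != rp[i]) goto chars_not_equal;
//   }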
2864
2865
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002866void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
2867 // ----------- S t a t e -------------
2868 // -- a1 : left
2869 // -- a0 : right
2870 // -- ra : return address
2871 // -----------------------------------
2872
2873 // Load a2 with the allocation site. We stick an undefined dummy value here
2874 // and replace it with the real allocation site later when we instantiate this
2875 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
2876 __ li(a2, handle(isolate()->heap()->undefined_value()));
2877
2878 // Make sure that we actually patched the allocation site.
2879 if (FLAG_debug_code) {
2880 __ And(at, a2, Operand(kSmiTagMask));
2881 __ Assert(ne, kExpectedAllocationSite, at, Operand(zero_reg));
2882 __ ld(a4, FieldMemOperand(a2, HeapObject::kMapOffset));
2883 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
2884 __ Assert(eq, kExpectedAllocationSite, a4, Operand(at));
2885 }
2886
2887 // Tail call into the stub that handles binary operations with allocation
2888 // sites.
2889 BinaryOpWithAllocationSiteStub stub(isolate(), state());
2890 __ TailCallStub(&stub);
2891}
2892
2893
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002894void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
2895 DCHECK_EQ(CompareICState::BOOLEAN, state());
2896 Label miss;
2897
2898 __ CheckMap(a1, a2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
2899 __ CheckMap(a0, a3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002900 if (!Token::IsEqualityOp(op())) {
2901 __ ld(a1, FieldMemOperand(a1, Oddball::kToNumberOffset));
2902 __ AssertSmi(a1);
2903 __ ld(a0, FieldMemOperand(a0, Oddball::kToNumberOffset));
2904 __ AssertSmi(a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002905 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01002906 __ Ret(USE_DELAY_SLOT);
2907 __ Dsubu(v0, a1, a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002908
2909 __ bind(&miss);
2910 GenerateMiss(masm);
2911}
2912
2913
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002914void CompareICStub::GenerateSmis(MacroAssembler* masm) {
2915 DCHECK(state() == CompareICState::SMI);
2916 Label miss;
2917 __ Or(a2, a1, a0);
2918 __ JumpIfNotSmi(a2, &miss);
2919
2920 if (GetCondition() == eq) {
2921 // For equality we do not care about the sign of the result.
2922 __ Ret(USE_DELAY_SLOT);
2923 __ Dsubu(v0, a0, a1);
2924 } else {
2925 // Untag before subtracting to avoid handling overflow.
2926 __ SmiUntag(a1);
2927 __ SmiUntag(a0);
2928 __ Ret(USE_DELAY_SLOT);
2929 __ Dsubu(v0, a1, a0);
2930 }
2931
2932 __ bind(&miss);
2933 GenerateMiss(masm);
2934}
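// For ordered comparisons the operands are untagged before subtracting so the
// subtraction cannot overflow; in effect (illustrative only):
//
//   intptr_t diff = Smi::value(lhs) - Smi::value(rhs);
//   // negative => LESS, zero => EQUAL, positive => GREATER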
2935
2936
2937void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
2938 DCHECK(state() == CompareICState::NUMBER);
2939
2940 Label generic_stub;
2941 Label unordered, maybe_undefined1, maybe_undefined2;
2942 Label miss;
2943
2944 if (left() == CompareICState::SMI) {
2945 __ JumpIfNotSmi(a1, &miss);
2946 }
2947 if (right() == CompareICState::SMI) {
2948 __ JumpIfNotSmi(a0, &miss);
2949 }
2950
2951 // Inlining the double comparison and falling back to the general compare
2952 // stub if NaN is involved.
2953 // Load left and right operand.
2954 Label done, left, left_smi, right_smi;
2955 __ JumpIfSmi(a0, &right_smi);
2956 __ CheckMap(a0, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1,
2957 DONT_DO_SMI_CHECK);
2958 __ Dsubu(a2, a0, Operand(kHeapObjectTag));
2959 __ ldc1(f2, MemOperand(a2, HeapNumber::kValueOffset));
2960 __ Branch(&left);
2961 __ bind(&right_smi);
2962 __ SmiUntag(a2, a0); // Can't clobber a0 yet.
2963 FPURegister single_scratch = f6;
2964 __ mtc1(a2, single_scratch);
2965 __ cvt_d_w(f2, single_scratch);
2966
2967 __ bind(&left);
2968 __ JumpIfSmi(a1, &left_smi);
2969 __ CheckMap(a1, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2,
2970 DONT_DO_SMI_CHECK);
2971 __ Dsubu(a2, a1, Operand(kHeapObjectTag));
2972 __ ldc1(f0, MemOperand(a2, HeapNumber::kValueOffset));
2973 __ Branch(&done);
2974 __ bind(&left_smi);
2975 __ SmiUntag(a2, a1); // Can't clobber a1 yet.
2976 single_scratch = f8;
2977 __ mtc1(a2, single_scratch);
2978 __ cvt_d_w(f0, single_scratch);
2979
2980 __ bind(&done);
2981
2982 // Return a result of -1, 0, or 1, or use CompareStub for NaNs.
2983 Label fpu_eq, fpu_lt;
2984 // Test if equal, and also handle the unordered/NaN case.
2985 __ BranchF(&fpu_eq, &unordered, eq, f0, f2);
2986
2987 // Test if less (unordered case is already handled).
2988 __ BranchF(&fpu_lt, NULL, lt, f0, f2);
2989
2990 // Otherwise it's greater, so just fall thru, and return.
2991 DCHECK(is_int16(GREATER) && is_int16(EQUAL) && is_int16(LESS));
2992 __ Ret(USE_DELAY_SLOT);
2993 __ li(v0, Operand(GREATER));
2994
2995 __ bind(&fpu_eq);
2996 __ Ret(USE_DELAY_SLOT);
2997 __ li(v0, Operand(EQUAL));
2998
2999 __ bind(&fpu_lt);
3000 __ Ret(USE_DELAY_SLOT);
3001 __ li(v0, Operand(LESS));
3002
3003 __ bind(&unordered);
3004 __ bind(&generic_stub);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003005 CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003006 CompareICState::GENERIC, CompareICState::GENERIC);
3007 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
3008
3009 __ bind(&maybe_undefined1);
3010 if (Token::IsOrderedRelationalCompareOp(op())) {
3011 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
3012 __ Branch(&miss, ne, a0, Operand(at));
3013 __ JumpIfSmi(a1, &unordered);
3014 __ GetObjectType(a1, a2, a2);
3015 __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE));
3016 __ jmp(&unordered);
3017 }
3018
3019 __ bind(&maybe_undefined2);
3020 if (Token::IsOrderedRelationalCompareOp(op())) {
3021 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
3022 __ Branch(&unordered, eq, a1, Operand(at));
3023 }
3024
3025 __ bind(&miss);
3026 GenerateMiss(masm);
3027}
3028
3029
3030void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
3031 DCHECK(state() == CompareICState::INTERNALIZED_STRING);
3032 Label miss;
3033
3034 // Registers containing left and right operands respectively.
3035 Register left = a1;
3036 Register right = a0;
3037 Register tmp1 = a2;
3038 Register tmp2 = a3;
3039
3040 // Check that both operands are heap objects.
3041 __ JumpIfEitherSmi(left, right, &miss);
3042
3043 // Check that both operands are internalized strings.
3044 __ ld(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
3045 __ ld(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3046 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
3047 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
3048 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
3049 __ Or(tmp1, tmp1, Operand(tmp2));
3050 __ And(at, tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask));
3051 __ Branch(&miss, ne, at, Operand(zero_reg));
3052
3053 // Make sure a0 is non-zero. At this point input operands are
3054 // guaranteed to be non-zero.
3055 DCHECK(right.is(a0));
3056 STATIC_ASSERT(EQUAL == 0);
3057 STATIC_ASSERT(kSmiTag == 0);
3058 __ mov(v0, right);
3059 // Internalized strings are compared by identity.
3060 __ Ret(ne, left, Operand(right));
3061 DCHECK(is_int16(EQUAL));
3062 __ Ret(USE_DELAY_SLOT);
3063 __ li(v0, Operand(Smi::FromInt(EQUAL)));
3064
3065 __ bind(&miss);
3066 GenerateMiss(masm);
3067}
3068
3069
3070void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
3071 DCHECK(state() == CompareICState::UNIQUE_NAME);
3072 DCHECK(GetCondition() == eq);
3073 Label miss;
3074
3075 // Registers containing left and right operands respectively.
3076 Register left = a1;
3077 Register right = a0;
3078 Register tmp1 = a2;
3079 Register tmp2 = a3;
3080
3081 // Check that both operands are heap objects.
3082 __ JumpIfEitherSmi(left, right, &miss);
3083
3084 // Check that both operands are unique names. This leaves the instance
3085 // types loaded in tmp1 and tmp2.
3086 __ ld(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
3087 __ ld(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3088 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
3089 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
3090
3091 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss);
3092 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss);
3093
3094 // Use a0 as result
3095 __ mov(v0, a0);
3096
3097 // Unique names are compared by identity.
3098 Label done;
3099 __ Branch(&done, ne, left, Operand(right));
3100 // Make sure a0 is non-zero. At this point input operands are
3101 // guaranteed to be non-zero.
3102 DCHECK(right.is(a0));
3103 STATIC_ASSERT(EQUAL == 0);
3104 STATIC_ASSERT(kSmiTag == 0);
3105 __ li(v0, Operand(Smi::FromInt(EQUAL)));
3106 __ bind(&done);
3107 __ Ret();
3108
3109 __ bind(&miss);
3110 GenerateMiss(masm);
3111}
3112
3113
3114void CompareICStub::GenerateStrings(MacroAssembler* masm) {
3115 DCHECK(state() == CompareICState::STRING);
3116 Label miss;
3117
3118 bool equality = Token::IsEqualityOp(op());
3119
3120 // Registers containing left and right operands respectively.
3121 Register left = a1;
3122 Register right = a0;
3123 Register tmp1 = a2;
3124 Register tmp2 = a3;
3125 Register tmp3 = a4;
3126 Register tmp4 = a5;
3127 Register tmp5 = a6;
3128
3129 // Check that both operands are heap objects.
3130 __ JumpIfEitherSmi(left, right, &miss);
3131
3132 // Check that both operands are strings. This leaves the instance
3133 // types loaded in tmp1 and tmp2.
3134 __ ld(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
3135 __ ld(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3136 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
3137 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
3138 STATIC_ASSERT(kNotStringTag != 0);
3139 __ Or(tmp3, tmp1, tmp2);
3140 __ And(tmp5, tmp3, Operand(kIsNotStringMask));
3141 __ Branch(&miss, ne, tmp5, Operand(zero_reg));
3142
3143 // Fast check for identical strings.
3144 Label left_ne_right;
3145 STATIC_ASSERT(EQUAL == 0);
3146 STATIC_ASSERT(kSmiTag == 0);
3147 __ Branch(&left_ne_right, ne, left, Operand(right));
3148 __ Ret(USE_DELAY_SLOT);
3149 __ mov(v0, zero_reg); // In the delay slot.
3150 __ bind(&left_ne_right);
3151
3152 // Handle not identical strings.
3153
3154 // Check that both strings are internalized strings. If they are, we're done
3155 // because we already know they are not identical. We know they are both
3156 // strings.
3157 if (equality) {
3158 DCHECK(GetCondition() == eq);
3159 STATIC_ASSERT(kInternalizedTag == 0);
3160 __ Or(tmp3, tmp1, Operand(tmp2));
3161 __ And(tmp5, tmp3, Operand(kIsNotInternalizedMask));
3162 Label is_symbol;
3163 __ Branch(&is_symbol, ne, tmp5, Operand(zero_reg));
3164 // Make sure a0 is non-zero. At this point input operands are
3165 // guaranteed to be non-zero.
3166 DCHECK(right.is(a0));
3167 __ Ret(USE_DELAY_SLOT);
3168 __ mov(v0, a0); // In the delay slot.
3169 __ bind(&is_symbol);
3170 }
3171
3172 // Check that both strings are sequential one_byte.
3173 Label runtime;
3174 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(tmp1, tmp2, tmp3, tmp4,
3175 &runtime);
3176
3177 // Compare flat one_byte strings. Returns when done.
3178 if (equality) {
3179 StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1, tmp2,
3180 tmp3);
3181 } else {
3182 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
3183 tmp2, tmp3, tmp4);
3184 }
3185
3186 // Handle more complex cases in runtime.
3187 __ bind(&runtime);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003188 if (equality) {
Ben Murdochda12d292016-06-02 14:46:10 +01003189 {
3190 FrameScope scope(masm, StackFrame::INTERNAL);
3191 __ Push(left, right);
3192 __ CallRuntime(Runtime::kStringEqual);
3193 }
3194 __ LoadRoot(a0, Heap::kTrueValueRootIndex);
3195 __ Ret(USE_DELAY_SLOT);
3196 __ Subu(v0, v0, a0); // In delay slot.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003197 } else {
Ben Murdochda12d292016-06-02 14:46:10 +01003198 __ Push(left, right);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003199 __ TailCallRuntime(Runtime::kStringCompare);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003200 }
3201
3202 __ bind(&miss);
3203 GenerateMiss(masm);
3204}
3205
3206
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003207void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
3208 DCHECK_EQ(CompareICState::RECEIVER, state());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003209 Label miss;
3210 __ And(a2, a1, Operand(a0));
3211 __ JumpIfSmi(a2, &miss);
3212
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003213 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003214 __ GetObjectType(a0, a2, a2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003215 __ Branch(&miss, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003216 __ GetObjectType(a1, a2, a2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003217 __ Branch(&miss, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003218
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003219 DCHECK_EQ(eq, GetCondition());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003220 __ Ret(USE_DELAY_SLOT);
3221 __ dsubu(v0, a0, a1);
3222
3223 __ bind(&miss);
3224 GenerateMiss(masm);
3225}
3226
3227
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003228void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003229 Label miss;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003230 Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003231 __ And(a2, a1, a0);
3232 __ JumpIfSmi(a2, &miss);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003233 __ GetWeakValue(a4, cell);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003234 __ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
3235 __ ld(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003236 __ Branch(&miss, ne, a2, Operand(a4));
3237 __ Branch(&miss, ne, a3, Operand(a4));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003238
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003239 if (Token::IsEqualityOp(op())) {
3240 __ Ret(USE_DELAY_SLOT);
3241 __ dsubu(v0, a0, a1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003242 } else {
3243 if (op() == Token::LT || op() == Token::LTE) {
3244 __ li(a2, Operand(Smi::FromInt(GREATER)));
3245 } else {
3246 __ li(a2, Operand(Smi::FromInt(LESS)));
3247 }
3248 __ Push(a1, a0, a2);
3249 __ TailCallRuntime(Runtime::kCompare);
3250 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003251
3252 __ bind(&miss);
3253 GenerateMiss(masm);
3254}
3255
3256
3257void CompareICStub::GenerateMiss(MacroAssembler* masm) {
3258 {
3259 // Call the runtime system in a fresh internal frame.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003260 FrameScope scope(masm, StackFrame::INTERNAL);
3261 __ Push(a1, a0);
3262 __ Push(ra, a1, a0);
3263 __ li(a4, Operand(Smi::FromInt(op())));
3264 __ daddiu(sp, sp, -kPointerSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003265 __ CallRuntime(Runtime::kCompareIC_Miss, 3, kDontSaveFPRegs,
3266 USE_DELAY_SLOT);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003267 __ sd(a4, MemOperand(sp)); // In the delay slot.
3268 // Compute the entry point of the rewritten stub.
3269 __ Daddu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
3270 // Restore registers.
3271 __ Pop(a1, a0, ra);
3272 }
3273 __ Jump(a2);
3274}
3275
3276
3277void DirectCEntryStub::Generate(MacroAssembler* masm) {
3278 // Make place for arguments to fit C calling convention. Most of the callers
3279 // of DirectCEntryStub::GenerateCall are using EnterExitFrame/LeaveExitFrame
3280 // so they handle stack restoring and we don't have to do that here.
3281 // Any caller of DirectCEntryStub::GenerateCall must take care of dropping
3282 // kCArgsSlotsSize stack space after the call.
3283 __ daddiu(sp, sp, -kCArgsSlotsSize);
3284 // Place the return address on the stack, making the call
3285 // GC safe. The RegExp backend also relies on this.
3286 __ sd(ra, MemOperand(sp, kCArgsSlotsSize));
3287 __ Call(t9); // Call the C++ function.
3288 __ ld(t9, MemOperand(sp, kCArgsSlotsSize));
3289
3290 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
3291 // In case of an error the return address may point to a memory area
3292 // filled with kZapValue by the GC.
3293 // Dereference the address and check for this.
3294 __ Uld(a4, MemOperand(t9));
3295 __ Assert(ne, kReceivedInvalidReturnAddress, a4,
3296 Operand(reinterpret_cast<uint64_t>(kZapValue)));
3297 }
3298 __ Jump(t9);
3299}
3300
3301
3302void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
3303 Register target) {
3304 intptr_t loc =
3305 reinterpret_cast<intptr_t>(GetCode().location());
3306 __ Move(t9, target);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003307 __ li(at, Operand(loc, RelocInfo::CODE_TARGET), CONSTANT_SIZE);
3308 __ Call(at);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003309}
3310
3311
3312void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
3313 Label* miss,
3314 Label* done,
3315 Register receiver,
3316 Register properties,
3317 Handle<Name> name,
3318 Register scratch0) {
3319 DCHECK(name->IsUniqueName());
3320  // If the names in slots 1 through kProbes - 1 for the hash value do not
3321  // match the name, and the kProbes-th slot is unused (its name is the
3322  // undefined value), then the hash table is guaranteed not to contain the
3323  // property. This holds even if some slots hold deleted properties
3324  // (their names are the hole value).
3325 for (int i = 0; i < kInlinedProbes; i++) {
3326 // scratch0 points to properties hash.
3327 // Compute the masked index: (hash + i + i * i) & mask.
3328 Register index = scratch0;
3329 // Capacity is smi 2^n.
3330 __ SmiLoadUntag(index, FieldMemOperand(properties, kCapacityOffset));
3331 __ Dsubu(index, index, Operand(1));
3332 __ And(index, index,
3333 Operand(name->Hash() + NameDictionary::GetProbeOffset(i)));
3334
3335 // Scale the index by multiplying by the entry size.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003336 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003337 __ Dlsa(index, index, index, 1); // index *= 3.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003338
3339 Register entity_name = scratch0;
3340 // Having undefined at this place means the name is not contained.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003341 STATIC_ASSERT(kSmiTagSize == 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003342 Register tmp = properties;
3343
Ben Murdoch097c5b22016-05-18 11:27:45 +01003344 __ Dlsa(tmp, properties, index, kPointerSizeLog2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003345 __ ld(entity_name, FieldMemOperand(tmp, kElementsStartOffset));
3346
3347 DCHECK(!tmp.is(entity_name));
3348 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
3349 __ Branch(done, eq, entity_name, Operand(tmp));
3350
3351 // Load the hole ready for use below:
3352 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
3353
3354 // Stop if found the property.
3355 __ Branch(miss, eq, entity_name, Operand(Handle<Name>(name)));
3356
3357 Label good;
3358 __ Branch(&good, eq, entity_name, Operand(tmp));
3359
3360 // Check if the entry name is not a unique name.
3361 __ ld(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
3362 __ lbu(entity_name,
3363 FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
3364 __ JumpIfNotUniqueNameInstanceType(entity_name, miss);
3365 __ bind(&good);
3366
3367 // Restore the properties.
3368 __ ld(properties,
3369 FieldMemOperand(receiver, JSObject::kPropertiesOffset));
3370 }
3371
3372 const int spill_mask =
3373 (ra.bit() | a6.bit() | a5.bit() | a4.bit() | a3.bit() |
3374 a2.bit() | a1.bit() | a0.bit() | v0.bit());
3375
3376 __ MultiPush(spill_mask);
3377 __ ld(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
3378 __ li(a1, Operand(Handle<Name>(name)));
3379 NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP);
3380 __ CallStub(&stub);
3381 __ mov(at, v0);
3382 __ MultiPop(spill_mask);
3383
3384 __ Branch(done, eq, at, Operand(zero_reg));
3385 __ Branch(miss, ne, at, Operand(zero_reg));
3386}
3387
3388
3389// Probe the name dictionary in the |elements| register. Jump to the
3390// |done| label if a property with the given name is found. Jump to
3391// the |miss| label otherwise.
3392 // If the lookup was successful, |scratch2| will hold elements + kPointerSize * 3 * index.
3393void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
3394 Label* miss,
3395 Label* done,
3396 Register elements,
3397 Register name,
3398 Register scratch1,
3399 Register scratch2) {
3400 DCHECK(!elements.is(scratch1));
3401 DCHECK(!elements.is(scratch2));
3402 DCHECK(!name.is(scratch1));
3403 DCHECK(!name.is(scratch2));
3404
3405 __ AssertName(name);
3406
3407 // Compute the capacity mask.
3408 __ ld(scratch1, FieldMemOperand(elements, kCapacityOffset));
3409 __ SmiUntag(scratch1);
3410 __ Dsubu(scratch1, scratch1, Operand(1));
3411
3412 // Generate an unrolled loop that performs a few probes before
3413 // giving up. Measurements done on Gmail indicate that 2 probes
3414 // cover ~93% of loads from dictionaries.
3415 for (int i = 0; i < kInlinedProbes; i++) {
3416 // Compute the masked index: (hash + i + i * i) & mask.
3417 __ lwu(scratch2, FieldMemOperand(name, Name::kHashFieldOffset));
3418 if (i > 0) {
3419 // Add the probe offset (i + i * i) left shifted to avoid right shifting
3420 // the hash in a separate instruction. The value hash + i + i * i is right
3421      // shifted and masked by the instructions that follow.
3422 DCHECK(NameDictionary::GetProbeOffset(i) <
3423 1 << (32 - Name::kHashFieldOffset));
3424 __ Daddu(scratch2, scratch2, Operand(
3425 NameDictionary::GetProbeOffset(i) << Name::kHashShift));
3426 }
3427 __ dsrl(scratch2, scratch2, Name::kHashShift);
3428 __ And(scratch2, scratch1, scratch2);
3429
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003430 // Scale the index by multiplying by the entry size.
3431 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003432 // scratch2 = scratch2 * 3.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003433 __ Dlsa(scratch2, scratch2, scratch2, 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003434
3435 // Check if the key is identical to the name.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003436 __ Dlsa(scratch2, elements, scratch2, kPointerSizeLog2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003437 __ ld(at, FieldMemOperand(scratch2, kElementsStartOffset));
3438 __ Branch(done, eq, name, Operand(at));
3439 }
3440
3441 const int spill_mask =
3442 (ra.bit() | a6.bit() | a5.bit() | a4.bit() |
3443 a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) &
3444 ~(scratch1.bit() | scratch2.bit());
3445
3446 __ MultiPush(spill_mask);
3447 if (name.is(a0)) {
3448 DCHECK(!elements.is(a1));
3449 __ Move(a1, name);
3450 __ Move(a0, elements);
3451 } else {
3452 __ Move(a0, elements);
3453 __ Move(a1, name);
3454 }
3455 NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP);
3456 __ CallStub(&stub);
3457 __ mov(scratch2, a2);
3458 __ mov(at, v0);
3459 __ MultiPop(spill_mask);
3460
3461 __ Branch(done, ne, at, Operand(zero_reg));
3462 __ Branch(miss, eq, at, Operand(zero_reg));
3463}
3464
3465
3466void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
3467 // This stub overrides SometimesSetsUpAFrame() to return false. That means
3468 // we cannot call anything that could cause a GC from this stub.
3469 // Registers:
3470  //  result: will hold the lookup result (v0).
3471  //  key: the name to look up (a1).
3472  //  dictionary: NameDictionary to probe (a0).
3473  //  index: will hold the index of the entry if the lookup is successful;
3474  //         it might alias with result.
3475  // Returns:
3476  //  result is zero if the lookup failed, non-zero otherwise.
3477
3478 Register result = v0;
3479 Register dictionary = a0;
3480 Register key = a1;
3481 Register index = a2;
3482 Register mask = a3;
3483 Register hash = a4;
3484 Register undefined = a5;
3485 Register entry_key = a6;
3486
3487 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
3488
3489 __ ld(mask, FieldMemOperand(dictionary, kCapacityOffset));
3490 __ SmiUntag(mask);
3491 __ Dsubu(mask, mask, Operand(1));
3492
3493 __ lwu(hash, FieldMemOperand(key, Name::kHashFieldOffset));
3494
3495 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
3496
3497 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
3498 // Compute the masked index: (hash + i + i * i) & mask.
3499 // Capacity is smi 2^n.
3500 if (i > 0) {
3501 // Add the probe offset (i + i * i) left shifted to avoid right shifting
3502 // the hash in a separate instruction. The value hash + i + i * i is right
3503      // shifted and masked by the instructions that follow.
3504 DCHECK(NameDictionary::GetProbeOffset(i) <
3505 1 << (32 - Name::kHashFieldOffset));
3506 __ Daddu(index, hash, Operand(
3507 NameDictionary::GetProbeOffset(i) << Name::kHashShift));
3508 } else {
3509 __ mov(index, hash);
3510 }
3511 __ dsrl(index, index, Name::kHashShift);
3512 __ And(index, mask, index);
3513
3514 // Scale the index by multiplying by the entry size.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003515 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003516 // index *= 3.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003517 __ Dlsa(index, index, index, 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003518
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003519 STATIC_ASSERT(kSmiTagSize == 1);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003520 __ Dlsa(index, dictionary, index, kPointerSizeLog2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003521 __ ld(entry_key, FieldMemOperand(index, kElementsStartOffset));
3522
3523 // Having undefined at this place means the name is not contained.
3524 __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined));
3525
3526 // Stop if found the property.
3527 __ Branch(&in_dictionary, eq, entry_key, Operand(key));
3528
3529 if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
3530 // Check if the entry name is not a unique name.
3531 __ ld(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset));
3532 __ lbu(entry_key,
3533 FieldMemOperand(entry_key, Map::kInstanceTypeOffset));
3534 __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary);
3535 }
3536 }
3537
3538 __ bind(&maybe_in_dictionary);
3539 // If we are doing negative lookup then probing failure should be
3540 // treated as a lookup success. For positive lookup probing failure
3541 // should be treated as lookup failure.
3542 if (mode() == POSITIVE_LOOKUP) {
3543 __ Ret(USE_DELAY_SLOT);
3544 __ mov(result, zero_reg);
3545 }
3546
3547 __ bind(&in_dictionary);
3548 __ Ret(USE_DELAY_SLOT);
3549 __ li(result, 1);
3550
3551 __ bind(&not_in_dictionary);
3552 __ Ret(USE_DELAY_SLOT);
3553 __ mov(result, zero_reg);
3554}
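// The probing above is a simplified sketch of the NameDictionary lookup
// scheme (entry layout abbreviated, kEntrySize == 3; result semantics match
// the stub, which only reports found / not found):
//
//   uint32_t mask = capacity - 1;                    // capacity is a power of two
//   for (int i = 0; i < kTotalProbes; i++) {
//     uint32_t index = (hash + i + i * i) & mask;    // see the comments above
//     Object* entry_key = elements[index * 3];
//     if (entry_key == undefined) return 0;          // empty slot: not in dictionary
//     if (entry_key == key) return 1;                // found
//   }
//   // Otherwise fall through to the maybe_in_dictionary handling above.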
3555
3556
3557void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
3558 Isolate* isolate) {
3559 StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
3560 stub1.GetCode();
3561 // Hydrogen code stubs need stub2 at snapshot time.
3562 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
3563 stub2.GetCode();
3564}
3565
3566
3567// Takes the input in 3 registers: address_ value_ and object_. A pointer to
3568// the value has just been written into the object, now this stub makes sure
3569// we keep the GC informed. The word in the object where the value has been
3570// written is in the address register.
3571void RecordWriteStub::Generate(MacroAssembler* masm) {
3572 Label skip_to_incremental_noncompacting;
3573 Label skip_to_incremental_compacting;
3574
3575 // The first two branch+nop instructions are generated with labels so as to
3576 // get the offset fixed up correctly by the bind(Label*) call. We patch it
3577 // back and forth between a "bne zero_reg, zero_reg, ..." (a nop in this
3578 // position) and the "beq zero_reg, zero_reg, ..." when we start and stop
3579 // incremental heap marking.
3580 // See RecordWriteStub::Patch for details.
3581 __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting);
3582 __ nop();
3583 __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting);
3584 __ nop();
3585
3586 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
3587 __ RememberedSetHelper(object(),
3588 address(),
3589 value(),
3590 save_fp_regs_mode(),
3591 MacroAssembler::kReturnAtEnd);
3592 }
3593 __ Ret();
3594
3595 __ bind(&skip_to_incremental_noncompacting);
3596 GenerateIncremental(masm, INCREMENTAL);
3597
3598 __ bind(&skip_to_incremental_compacting);
3599 GenerateIncremental(masm, INCREMENTAL_COMPACTION);
3600
3601 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
3602 // Will be checked in IncrementalMarking::ActivateGeneratedStub.
3603
3604 PatchBranchIntoNop(masm, 0);
3605 PatchBranchIntoNop(masm, 2 * Assembler::kInstrSize);
3606}
3607
3608
3609void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
3610 regs_.Save(masm);
3611
3612 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
3613 Label dont_need_remembered_set;
3614
3615 __ ld(regs_.scratch0(), MemOperand(regs_.address(), 0));
3616 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value.
3617 regs_.scratch0(),
3618 &dont_need_remembered_set);
3619
Ben Murdoch097c5b22016-05-18 11:27:45 +01003620 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
3621 &dont_need_remembered_set);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003622
3623 // First notify the incremental marker if necessary, then update the
3624 // remembered set.
3625 CheckNeedsToInformIncrementalMarker(
3626 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
3627 InformIncrementalMarker(masm);
3628 regs_.Restore(masm);
3629 __ RememberedSetHelper(object(),
3630 address(),
3631 value(),
3632 save_fp_regs_mode(),
3633 MacroAssembler::kReturnAtEnd);
3634
3635 __ bind(&dont_need_remembered_set);
3636 }
3637
3638 CheckNeedsToInformIncrementalMarker(
3639 masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
3640 InformIncrementalMarker(masm);
3641 regs_.Restore(masm);
3642 __ Ret();
3643}
3644
3645
3646void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
3647 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
3648 int argument_count = 3;
3649 __ PrepareCallCFunction(argument_count, regs_.scratch0());
3650 Register address =
3651 a0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
3652 DCHECK(!address.is(regs_.object()));
3653 DCHECK(!address.is(a0));
3654 __ Move(address, regs_.address());
3655 __ Move(a0, regs_.object());
3656 __ Move(a1, address);
3657 __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
3658
3659 AllowExternalCallThatCantCauseGC scope(masm);
3660 __ CallCFunction(
3661 ExternalReference::incremental_marking_record_write_function(isolate()),
3662 argument_count);
3663 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
3664}
3665
3666
3667void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
3668 MacroAssembler* masm,
3669 OnNoNeedToInformIncrementalMarker on_no_need,
3670 Mode mode) {
3671 Label on_black;
3672 Label need_incremental;
3673 Label need_incremental_pop_scratch;
3674
3675 __ And(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask));
3676 __ ld(regs_.scratch1(),
3677 MemOperand(regs_.scratch0(),
3678 MemoryChunk::kWriteBarrierCounterOffset));
3679 __ Dsubu(regs_.scratch1(), regs_.scratch1(), Operand(1));
3680 __ sd(regs_.scratch1(),
3681 MemOperand(regs_.scratch0(),
3682 MemoryChunk::kWriteBarrierCounterOffset));
3683 __ Branch(&need_incremental, lt, regs_.scratch1(), Operand(zero_reg));
3684
3685 // Let's look at the color of the object: If it is not black we don't have
3686 // to inform the incremental marker.
3687 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
3688
3689 regs_.Restore(masm);
3690 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
3691 __ RememberedSetHelper(object(),
3692 address(),
3693 value(),
3694 save_fp_regs_mode(),
3695 MacroAssembler::kReturnAtEnd);
3696 } else {
3697 __ Ret();
3698 }
3699
3700 __ bind(&on_black);
3701
3702 // Get the value from the slot.
3703 __ ld(regs_.scratch0(), MemOperand(regs_.address(), 0));
3704
3705 if (mode == INCREMENTAL_COMPACTION) {
3706 Label ensure_not_white;
3707
3708 __ CheckPageFlag(regs_.scratch0(), // Contains value.
3709 regs_.scratch1(), // Scratch.
3710 MemoryChunk::kEvacuationCandidateMask,
3711 eq,
3712 &ensure_not_white);
3713
3714 __ CheckPageFlag(regs_.object(),
3715 regs_.scratch1(), // Scratch.
3716 MemoryChunk::kSkipEvacuationSlotsRecordingMask,
3717 eq,
3718 &need_incremental);
3719
3720 __ bind(&ensure_not_white);
3721 }
3722
3723 // We need extra registers for this, so we push the object and the address
3724 // register temporarily.
3725 __ Push(regs_.object(), regs_.address());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003726 __ JumpIfWhite(regs_.scratch0(), // The value.
3727 regs_.scratch1(), // Scratch.
3728 regs_.object(), // Scratch.
3729 regs_.address(), // Scratch.
3730 &need_incremental_pop_scratch);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003731 __ Pop(regs_.object(), regs_.address());
3732
3733 regs_.Restore(masm);
3734 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
3735 __ RememberedSetHelper(object(),
3736 address(),
3737 value(),
3738 save_fp_regs_mode(),
3739 MacroAssembler::kReturnAtEnd);
3740 } else {
3741 __ Ret();
3742 }
3743
3744 __ bind(&need_incremental_pop_scratch);
3745 __ Pop(regs_.object(), regs_.address());
3746
3747 __ bind(&need_incremental);
3748
3749 // Fall through when we need to inform the incremental marker.
3750}
3751
3752
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003753void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
3754 CEntryStub ces(isolate(), 1, kSaveFPRegs);
3755 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
3756 int parameter_count_offset =
Ben Murdochda12d292016-06-02 14:46:10 +01003757 StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003758 __ ld(a1, MemOperand(fp, parameter_count_offset));
3759 if (function_mode() == JS_FUNCTION_STUB_MODE) {
3760 __ Daddu(a1, a1, Operand(1));
3761 }
3762 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
3763 __ dsll(a1, a1, kPointerSizeLog2);
3764 __ Ret(USE_DELAY_SLOT);
3765 __ Daddu(sp, sp, a1);
3766}
3767
3768
3769void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003770 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
3771 LoadICStub stub(isolate(), state());
3772 stub.GenerateForTrampoline(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003773}
3774
3775
3776void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003777 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
3778 KeyedLoadICStub stub(isolate(), state());
3779 stub.GenerateForTrampoline(masm);
3780}
3781
3782
3783void CallICTrampolineStub::Generate(MacroAssembler* masm) {
3784 __ EmitLoadTypeFeedbackVector(a2);
3785 CallICStub stub(isolate(), state());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003786 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
3787}
3788
3789
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003790void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
3791
3792
3793void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
3794 GenerateImpl(masm, true);
3795}
3796
3797
3798static void HandleArrayCases(MacroAssembler* masm, Register feedback,
3799 Register receiver_map, Register scratch1,
3800 Register scratch2, bool is_polymorphic,
3801 Label* miss) {
3802 // feedback initially contains the feedback array
3803 Label next_loop, prepare_next;
3804 Label start_polymorphic;
3805
3806 Register cached_map = scratch1;
3807
3808 __ ld(cached_map,
3809 FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(0)));
3810 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
3811 __ Branch(&start_polymorphic, ne, receiver_map, Operand(cached_map));
3812 // found, now call handler.
3813 Register handler = feedback;
3814 __ ld(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1)));
3815 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
3816 __ Jump(t9);
3817
3818 Register length = scratch2;
3819 __ bind(&start_polymorphic);
3820 __ ld(length, FieldMemOperand(feedback, FixedArray::kLengthOffset));
3821 if (!is_polymorphic) {
3822 // If the IC could be monomorphic we have to make sure we don't go past the
3823 // end of the feedback array.
3824 __ Branch(miss, eq, length, Operand(Smi::FromInt(2)));
3825 }
3826
3827 Register too_far = length;
3828 Register pointer_reg = feedback;
3829
3830 // +-----+------+------+-----+-----+ ... ----+
3831 // | map | len | wm0 | h0 | wm1 | hN |
3832 // +-----+------+------+-----+-----+ ... ----+
3833 // 0 1 2 len-1
3834 // ^ ^
3835 // | |
3836 // pointer_reg too_far
3837 // aka feedback scratch2
3838 // also need receiver_map
3839 // use cached_map (scratch1) to look in the weak map values.
3840 __ SmiScale(too_far, length, kPointerSizeLog2);
3841 __ Daddu(too_far, feedback, Operand(too_far));
3842 __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3843 __ Daddu(pointer_reg, feedback,
3844 Operand(FixedArray::OffsetOfElementAt(2) - kHeapObjectTag));
3845
3846 __ bind(&next_loop);
3847 __ ld(cached_map, MemOperand(pointer_reg));
3848 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
3849 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map));
3850 __ ld(handler, MemOperand(pointer_reg, kPointerSize));
3851 __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
3852 __ Jump(t9);
3853
3854 __ bind(&prepare_next);
3855 __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 2));
3856 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far));
3857
3858 // We exhausted our array of map handler pairs.
3859 __ Branch(miss);
3860}
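// The scan above walks (weak map, handler) pairs in the feedback array;
// roughly (illustrative only, helper names approximate):
//
//   for (int i = 0; i < feedback->length(); i += 2) {
//     Map* map = WeakCell::cast(feedback->get(i))->value();
//     if (map == receiver_map) TailCall(Code::cast(feedback->get(i + 1)));
//   }
//   goto miss;   // no map matched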
3861
3862
3863static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
3864 Register receiver_map, Register feedback,
3865 Register vector, Register slot,
3866 Register scratch, Label* compare_map,
3867 Label* load_smi_map, Label* try_array) {
3868 __ JumpIfSmi(receiver, load_smi_map);
3869 __ ld(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
3870 __ bind(compare_map);
3871 Register cached_map = scratch;
3872 // Move the weak map into the weak_cell register.
3873 __ ld(cached_map, FieldMemOperand(feedback, WeakCell::kValueOffset));
3874 __ Branch(try_array, ne, cached_map, Operand(receiver_map));
3875 Register handler = feedback;
3876 __ SmiScale(handler, slot, kPointerSizeLog2);
3877 __ Daddu(handler, vector, Operand(handler));
3878 __ ld(handler,
3879 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize));
3880 __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag);
3881 __ Jump(t9);
3882}
3883
3884
3885void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
3886 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // a1
3887 Register name = LoadWithVectorDescriptor::NameRegister(); // a2
3888 Register vector = LoadWithVectorDescriptor::VectorRegister(); // a3
3889 Register slot = LoadWithVectorDescriptor::SlotRegister(); // a0
3890 Register feedback = a4;
3891 Register receiver_map = a5;
3892 Register scratch1 = a6;
3893
3894 __ SmiScale(feedback, slot, kPointerSizeLog2);
3895 __ Daddu(feedback, vector, Operand(feedback));
3896 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
3897
3898 // Try to quickly handle the monomorphic case without knowing for sure
3899 // if we have a weak cell in feedback. We do know it's safe to look
3900 // at WeakCell::kValueOffset.
3901 Label try_array, load_smi_map, compare_map;
3902 Label not_array, miss;
3903 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
3904 scratch1, &compare_map, &load_smi_map, &try_array);
3905
3906 // Is it a fixed array?
3907 __ bind(&try_array);
3908 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
3909 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
3910 __ Branch(&not_array, ne, scratch1, Operand(at));
3911 HandleArrayCases(masm, feedback, receiver_map, scratch1, a7, true, &miss);
3912
3913 __ bind(&not_array);
3914 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
3915 __ Branch(&miss, ne, feedback, Operand(at));
Ben Murdochc5610432016-08-08 18:44:38 +01003916 Code::Flags code_flags =
3917 Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::LOAD_IC));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003918 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags,
3919 receiver, name, feedback,
3920 receiver_map, scratch1, a7);
3921
3922 __ bind(&miss);
3923 LoadIC::GenerateMiss(masm);
3924
3925 __ bind(&load_smi_map);
3926 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
3927 __ Branch(&compare_map);
3928}
3929
3930
3931void KeyedLoadICStub::Generate(MacroAssembler* masm) {
3932 GenerateImpl(masm, false);
3933}
3934
3935
3936void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
3937 GenerateImpl(masm, true);
3938}
3939
3940
3941void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
3942 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // a1
3943 Register key = LoadWithVectorDescriptor::NameRegister(); // a2
3944 Register vector = LoadWithVectorDescriptor::VectorRegister(); // a3
3945 Register slot = LoadWithVectorDescriptor::SlotRegister(); // a0
3946 Register feedback = a4;
3947 Register receiver_map = a5;
3948 Register scratch1 = a6;
3949
3950 __ SmiScale(feedback, slot, kPointerSizeLog2);
3951 __ Daddu(feedback, vector, Operand(feedback));
3952 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
3953
3954 // Try to quickly handle the monomorphic case without knowing for sure
3955 // if we have a weak cell in feedback. We do know it's safe to look
3956 // at WeakCell::kValueOffset.
3957 Label try_array, load_smi_map, compare_map;
3958 Label not_array, miss;
3959 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
3960 scratch1, &compare_map, &load_smi_map, &try_array);
3961
3962 __ bind(&try_array);
3963 // Is it a fixed array?
3964 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
3965 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
3966 __ Branch(&not_array, ne, scratch1, Operand(at));
3967 // We have a polymorphic element handler.
3968 __ JumpIfNotSmi(key, &miss);
3969
3970 Label polymorphic, try_poly_name;
3971 __ bind(&polymorphic);
3972 HandleArrayCases(masm, feedback, receiver_map, scratch1, a7, true, &miss);
3973
3974 __ bind(&not_array);
3975 // Is it generic?
3976 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
3977 __ Branch(&try_poly_name, ne, feedback, Operand(at));
3978 Handle<Code> megamorphic_stub =
3979 KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
3980 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET);
3981
3982 __ bind(&try_poly_name);
3983 // We might have a name in feedback, and a fixed array in the next slot.
3984 __ Branch(&miss, ne, key, Operand(feedback));
3985 // If the name comparison succeeded, we know we have a fixed array with
3986 // at least one map/handler pair.
3987 __ SmiScale(feedback, slot, kPointerSizeLog2);
3988 __ Daddu(feedback, vector, Operand(feedback));
3989 __ ld(feedback,
3990 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize));
3991 HandleArrayCases(masm, feedback, receiver_map, scratch1, a7, false, &miss);
3992
3993 __ bind(&miss);
3994 KeyedLoadIC::GenerateMiss(masm);
3995
3996 __ bind(&load_smi_map);
3997 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
3998 __ Branch(&compare_map);
3999}
4000
4001
4002void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
4003 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
4004 VectorStoreICStub stub(isolate(), state());
4005 stub.GenerateForTrampoline(masm);
4006}
4007
4008
4009void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
4010 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
4011 VectorKeyedStoreICStub stub(isolate(), state());
4012 stub.GenerateForTrampoline(masm);
4013}
4014
4015
4016void VectorStoreICStub::Generate(MacroAssembler* masm) {
4017 GenerateImpl(masm, false);
4018}
4019
4020
4021void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
4022 GenerateImpl(masm, true);
4023}
4024
4025
4026void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4027 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // a1
4028 Register key = VectorStoreICDescriptor::NameRegister(); // a2
4029 Register vector = VectorStoreICDescriptor::VectorRegister(); // a3
4030 Register slot = VectorStoreICDescriptor::SlotRegister(); // a4
4031 DCHECK(VectorStoreICDescriptor::ValueRegister().is(a0)); // a0
4032 Register feedback = a5;
4033 Register receiver_map = a6;
4034 Register scratch1 = a7;
4035
4036 __ SmiScale(scratch1, slot, kPointerSizeLog2);
4037 __ Daddu(feedback, vector, Operand(scratch1));
4038 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
4039
4040 // Try to quickly handle the monomorphic case without knowing for sure
4041 // if we have a weak cell in feedback. We do know it's safe to look
4042 // at WeakCell::kValueOffset.
4043 Label try_array, load_smi_map, compare_map;
4044 Label not_array, miss;
4045 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
4046 scratch1, &compare_map, &load_smi_map, &try_array);
4047
4048 // Is it a fixed array?
4049 __ bind(&try_array);
4050 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
4051 __ Branch(&not_array, ne, scratch1, Heap::kFixedArrayMapRootIndex);
4052
4053 Register scratch2 = t0;
4054 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, true,
4055 &miss);
4056
4057 __ bind(&not_array);
4058 __ Branch(&miss, ne, feedback, Heap::kmegamorphic_symbolRootIndex);
Ben Murdochc5610432016-08-08 18:44:38 +01004059 Code::Flags code_flags =
4060 Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::STORE_IC));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004061 masm->isolate()->stub_cache()->GenerateProbe(
4062 masm, Code::STORE_IC, code_flags, receiver, key, feedback, receiver_map,
4063 scratch1, scratch2);
4064
4065 __ bind(&miss);
4066 StoreIC::GenerateMiss(masm);
4067
4068 __ bind(&load_smi_map);
4069 __ Branch(USE_DELAY_SLOT, &compare_map);
4070 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); // In delay slot.
4071}
4072
4073
4074void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
4075 GenerateImpl(masm, false);
4076}
4077
4078
4079void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
4080 GenerateImpl(masm, true);
4081}
4082
4083
4084static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback,
4085 Register receiver_map, Register scratch1,
4086 Register scratch2, Label* miss) {
4087 // feedback initially contains the feedback array
4088 Label next_loop, prepare_next;
4089 Label start_polymorphic;
4090 Label transition_call;
4091
4092 Register cached_map = scratch1;
4093 Register too_far = scratch2;
4094 Register pointer_reg = feedback;
4095
4096 __ ld(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset));
4097
4098 // +-----+------+------+-----+-----+-----+ ... ----+
4099 // | map | len | wm0 | wt0 | h0 | wm1 | hN |
4100 // +-----+------+------+-----+-----+ ----+ ... ----+
4101 // 0 1 2 len-1
4102 // ^ ^
4103 // | |
4104 // pointer_reg too_far
4105 // aka feedback scratch2
4106 // also need receiver_map
4107 // use cached_map (scratch1) to look in the weak map values.
4108 __ SmiScale(too_far, too_far, kPointerSizeLog2);
4109 __ Daddu(too_far, feedback, Operand(too_far));
4110 __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4111 __ Daddu(pointer_reg, feedback,
4112 Operand(FixedArray::OffsetOfElementAt(0) - kHeapObjectTag));
4113
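  // Each entry in the polymorphic feedback array spans three slots: a weak
  // cell holding a receiver map, a weak cell holding a transition map (or
  // undefined for a non-transitioning store), and the handler code object.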
4114 __ bind(&next_loop);
4115 __ ld(cached_map, MemOperand(pointer_reg));
4116 __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
4117 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map));
4118 // Is it a transitioning store?
4119 __ ld(too_far, MemOperand(pointer_reg, kPointerSize));
4120 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4121 __ Branch(&transition_call, ne, too_far, Operand(at));
4122
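  // Not a transitioning store: tail-jump straight to the handler code object.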
4123 __ ld(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2));
4124 __ Daddu(t9, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag));
4125 __ Jump(t9);
4126
4127 __ bind(&transition_call);
4128 __ ld(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset));
4129 __ JumpIfSmi(too_far, miss);
4130
4131 __ ld(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));
4132 // Load the map into the correct register.
4133 DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister()));
4134 __ Move(feedback, too_far);
4135 __ Daddu(t9, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag));
4136 __ Jump(t9);
4137
4138 __ bind(&prepare_next);
4139 __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 3));
4140 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far));
4141
4142 // We exhausted our array of map handler pairs.
4143 __ Branch(miss);
4144}
4145
4146
4147void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4148 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // a1
4149 Register key = VectorStoreICDescriptor::NameRegister(); // a2
4150 Register vector = VectorStoreICDescriptor::VectorRegister(); // a3
4151 Register slot = VectorStoreICDescriptor::SlotRegister(); // a4
4152 DCHECK(VectorStoreICDescriptor::ValueRegister().is(a0)); // a0
4153 Register feedback = a5;
4154 Register receiver_map = a6;
4155 Register scratch1 = a7;
4156
4157 __ SmiScale(scratch1, slot, kPointerSizeLog2);
4158 __ Daddu(feedback, vector, Operand(scratch1));
4159 __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
4160
4161 // Try to quickly handle the monomorphic case without knowing for sure
4162 // if we have a weak cell in feedback. We do know it's safe to look
4163 // at WeakCell::kValueOffset.
4164 Label try_array, load_smi_map, compare_map;
4165 Label not_array, miss;
4166 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
4167 scratch1, &compare_map, &load_smi_map, &try_array);
4168
4169 __ bind(&try_array);
4170 // Is it a fixed array?
4171 __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
4172 __ Branch(&not_array, ne, scratch1, Heap::kFixedArrayMapRootIndex);
4173
4174 // We have a polymorphic element handler.
4175 Label try_poly_name;
4176
4177 Register scratch2 = t0;
4178
4179 HandlePolymorphicStoreCase(masm, feedback, receiver_map, scratch1, scratch2,
4180 &miss);
4181
4182 __ bind(&not_array);
4183 // Is it generic?
4184 __ Branch(&try_poly_name, ne, feedback, Heap::kmegamorphic_symbolRootIndex);
4185 Handle<Code> megamorphic_stub =
4186 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
4187 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET);
4188
4189 __ bind(&try_poly_name);
4190 // We might have a name in feedback, and a fixed array in the next slot.
4191 __ Branch(&miss, ne, key, Operand(feedback));
4192 // If the name comparison succeeded, we know we have a fixed array with
4193 // at least one map/handler pair.
4194 __ SmiScale(scratch1, slot, kPointerSizeLog2);
4195 __ Daddu(feedback, vector, Operand(scratch1));
4196 __ ld(feedback,
4197 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize));
4198 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, false,
4199 &miss);
4200
4201 __ bind(&miss);
4202 KeyedStoreIC::GenerateMiss(masm);
4203
4204 __ bind(&load_smi_map);
4205 __ Branch(USE_DELAY_SLOT, &compare_map);
4206 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); // In delay slot.
4207}
4208
4209
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004210void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
4211 if (masm->isolate()->function_entry_hook() != NULL) {
4212 ProfileEntryHookStub stub(masm->isolate());
4213 __ push(ra);
4214 __ CallStub(&stub);
4215 __ pop(ra);
4216 }
4217}
4218
4219
4220void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
4221 // The entry hook is a "push ra" instruction, followed by a call.
4222  // Note: on MIPS "push" is 2 instructions.
4223 const int32_t kReturnAddressDistanceFromFunctionStart =
4224 Assembler::kCallTargetAddressOffset + (2 * Assembler::kInstrSize);
4225
4226 // This should contain all kJSCallerSaved registers.
4227 const RegList kSavedRegs =
4228 kJSCallerSaved | // Caller saved registers.
4229 s5.bit(); // Saved stack pointer.
4230
4231 // We also save ra, so the count here is one higher than the mask indicates.
4232 const int32_t kNumSavedRegs = kNumJSCallerSaved + 2;
4233
4234 // Save all caller-save registers as this may be called from anywhere.
4235 __ MultiPush(kSavedRegs | ra.bit());
4236
4237 // Compute the function's address for the first argument.
4238 __ Dsubu(a0, ra, Operand(kReturnAddressDistanceFromFunctionStart));
4239
4240 // The caller's return address is above the saved temporaries.
4241 // Grab that for the second argument to the hook.
4242 __ Daddu(a1, sp, Operand(kNumSavedRegs * kPointerSize));
4243
4244 // Align the stack if necessary.
4245 int frame_alignment = masm->ActivationFrameAlignment();
4246 if (frame_alignment > kPointerSize) {
4247 __ mov(s5, sp);
4248 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
4249 __ And(sp, sp, Operand(-frame_alignment));
4250 }
4251
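  // Reserve the outgoing C argument slots required by the MIPS calling
  // convention (kCArgsSlotsSize may be zero on n64).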
4252 __ Dsubu(sp, sp, kCArgsSlotsSize);
4253#if defined(V8_HOST_ARCH_MIPS) || defined(V8_HOST_ARCH_MIPS64)
4254 int64_t entry_hook =
4255 reinterpret_cast<int64_t>(isolate()->function_entry_hook());
4256 __ li(t9, Operand(entry_hook));
4257#else
4258 // Under the simulator we need to indirect the entry hook through a
4259 // trampoline function at a known address.
4260 // It additionally takes an isolate as a third parameter.
4261 __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
4262
4263 ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
4264 __ li(t9, Operand(ExternalReference(&dispatcher,
4265 ExternalReference::BUILTIN_CALL,
4266 isolate())));
4267#endif
4268  // Call the C function through t9 to conform to the ABI for PIC.
4269 __ Call(t9);
4270
4271 // Restore the stack pointer if needed.
4272 if (frame_alignment > kPointerSize) {
4273 __ mov(sp, s5);
4274 } else {
4275 __ Daddu(sp, sp, kCArgsSlotsSize);
4276 }
4277
4278 // Also pop ra to get Ret(0).
4279 __ MultiPop(kSavedRegs | ra.bit());
4280 __ Ret();
4281}
4282
4283
4284template<class T>
4285static void CreateArrayDispatch(MacroAssembler* masm,
4286 AllocationSiteOverrideMode mode) {
4287 if (mode == DISABLE_ALLOCATION_SITES) {
4288 T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
4289 __ TailCallStub(&stub);
4290 } else if (mode == DONT_OVERRIDE) {
4291 int last_index = GetSequenceIndexFromFastElementsKind(
4292 TERMINAL_FAST_ELEMENTS_KIND);
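    // Probe every fast ElementsKind in sequence; each conditional tail call is
    // taken only when a3 holds that kind, so unmatched kinds fall through to
    // the Abort below.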
4293 for (int i = 0; i <= last_index; ++i) {
4294 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4295 T stub(masm->isolate(), kind);
4296 __ TailCallStub(&stub, eq, a3, Operand(kind));
4297 }
4298
4299 // If we reached this point there is a problem.
4300 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4301 } else {
4302 UNREACHABLE();
4303 }
4304}
4305
4306
4307static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
4308 AllocationSiteOverrideMode mode) {
4309 // a2 - allocation site (if mode != DISABLE_ALLOCATION_SITES)
4310 // a3 - kind (if mode != DISABLE_ALLOCATION_SITES)
4311 // a0 - number of arguments
4312 // a1 - constructor?
4313 // sp[0] - last argument
4314 Label normal_sequence;
4315 if (mode == DONT_OVERRIDE) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004316 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
4317 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
4318 STATIC_ASSERT(FAST_ELEMENTS == 2);
4319 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
4320 STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
4321 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004322
4323 // is the low bit set? If so, we are holey and that is good.
4324 __ And(at, a3, Operand(1));
4325 __ Branch(&normal_sequence, ne, at, Operand(zero_reg));
4326 }
4327 // look at the first argument
4328 __ ld(a5, MemOperand(sp, 0));
4329 __ Branch(&normal_sequence, eq, a5, Operand(zero_reg));
4330
4331 if (mode == DISABLE_ALLOCATION_SITES) {
4332 ElementsKind initial = GetInitialFastElementsKind();
4333 ElementsKind holey_initial = GetHoleyElementsKind(initial);
4334
4335 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
4336 holey_initial,
4337 DISABLE_ALLOCATION_SITES);
4338 __ TailCallStub(&stub_holey);
4339
4340 __ bind(&normal_sequence);
4341 ArraySingleArgumentConstructorStub stub(masm->isolate(),
4342 initial,
4343 DISABLE_ALLOCATION_SITES);
4344 __ TailCallStub(&stub);
4345 } else if (mode == DONT_OVERRIDE) {
4346 // We are going to create a holey array, but our kind is non-holey.
4347 // Fix kind and retry (only if we have an allocation site in the slot).
4348 __ Daddu(a3, a3, Operand(1));
4349
4350 if (FLAG_debug_code) {
4351 __ ld(a5, FieldMemOperand(a2, 0));
4352 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
4353 __ Assert(eq, kExpectedAllocationSite, a5, Operand(at));
4354 }
4355
4356 // Save the resulting elements kind in type info. We can't just store a3
4357 // in the AllocationSite::transition_info field because elements kind is
4358 // restricted to a portion of the field...upper bits need to be left alone.
4359 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
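    // The Smi addition below bumps the packed kind stored in the low bits to
    // its holey counterpart without touching the rest of the field.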
4360 __ ld(a4, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset));
4361 __ Daddu(a4, a4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
4362 __ sd(a4, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset));
4363
4364
4365 __ bind(&normal_sequence);
4366 int last_index = GetSequenceIndexFromFastElementsKind(
4367 TERMINAL_FAST_ELEMENTS_KIND);
4368 for (int i = 0; i <= last_index; ++i) {
4369 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4370 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
4371 __ TailCallStub(&stub, eq, a3, Operand(kind));
4372 }
4373
4374 // If we reached this point there is a problem.
4375 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4376 } else {
4377 UNREACHABLE();
4378 }
4379}
4380
4381
4382template<class T>
4383static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4384 int to_index = GetSequenceIndexFromFastElementsKind(
4385 TERMINAL_FAST_ELEMENTS_KIND);
4386 for (int i = 0; i <= to_index; ++i) {
4387 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4388 T stub(isolate, kind);
4389 stub.GetCode();
4390 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
4391 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
4392 stub1.GetCode();
4393 }
4394 }
4395}
4396
4397
4398void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
4399 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
4400 isolate);
4401 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
4402 isolate);
4403 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
4404 isolate);
4405}
4406
4407
4408void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
4409 Isolate* isolate) {
4410 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
4411 for (int i = 0; i < 2; i++) {
4412 // For internal arrays we only need a few things.
4413 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
4414 stubh1.GetCode();
4415 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
4416 stubh2.GetCode();
4417 InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
4418 stubh3.GetCode();
4419 }
4420}
4421
4422
4423void ArrayConstructorStub::GenerateDispatchToArrayStub(
4424 MacroAssembler* masm,
4425 AllocationSiteOverrideMode mode) {
4426 if (argument_count() == ANY) {
4427 Label not_zero_case, not_one_case;
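    // Dispatch on the argument count in a0: zero, one, and more than one
    // arguments each have a specialized construct stub.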
4428 __ And(at, a0, a0);
4429 __ Branch(&not_zero_case, ne, at, Operand(zero_reg));
4430 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4431
4432 __ bind(&not_zero_case);
4433 __ Branch(&not_one_case, gt, a0, Operand(1));
4434 CreateArrayDispatchOneArgument(masm, mode);
4435
4436 __ bind(&not_one_case);
4437 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4438 } else if (argument_count() == NONE) {
4439 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4440 } else if (argument_count() == ONE) {
4441 CreateArrayDispatchOneArgument(masm, mode);
4442 } else if (argument_count() == MORE_THAN_ONE) {
4443 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4444 } else {
4445 UNREACHABLE();
4446 }
4447}
4448
4449
4450void ArrayConstructorStub::Generate(MacroAssembler* masm) {
4451 // ----------- S t a t e -------------
4452 // -- a0 : argc (only if argument_count() == ANY)
4453 // -- a1 : constructor
4454 // -- a2 : AllocationSite or undefined
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004455 // -- a3 : new target
4456 // -- sp[0] : last argument
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004457 // -----------------------------------
4458
4459 if (FLAG_debug_code) {
4460 // The array construct code is only set for the global and natives
4461 // builtin Array functions which always have maps.
4462
4463 // Initial map for the builtin Array function should be a map.
4464 __ ld(a4, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
4465    // The following Smi test will catch both a NULL and a Smi.
4466 __ SmiTst(a4, at);
4467 __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
4468 at, Operand(zero_reg));
4469 __ GetObjectType(a4, a4, a5);
4470 __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
4471 a5, Operand(MAP_TYPE));
4472
4473 // We should either have undefined in a2 or a valid AllocationSite
4474 __ AssertUndefinedOrAllocationSite(a2, a4);
4475 }
4476
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004477 // Enter the context of the Array function.
4478 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
4479
4480 Label subclassing;
4481 __ Branch(&subclassing, ne, a1, Operand(a3));
4482
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004483 Label no_info;
4484 // Get the elements kind and case on that.
4485 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4486 __ Branch(&no_info, eq, a2, Operand(at));
4487
4488 __ ld(a3, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset));
4489 __ SmiUntag(a3);
4490 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4491 __ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask));
4492 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
4493
4494 __ bind(&no_info);
4495 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004496
4497 // Subclassing.
4498 __ bind(&subclassing);
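  // Store the constructor back into the argument area and adjust the argument
  // count so the runtime also receives the constructor, the new target and the
  // AllocationSite pushed below.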
4499 switch (argument_count()) {
4500 case ANY:
4501 case MORE_THAN_ONE:
Ben Murdoch097c5b22016-05-18 11:27:45 +01004502 __ Dlsa(at, sp, a0, kPointerSizeLog2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004503 __ sd(a1, MemOperand(at));
4504 __ li(at, Operand(3));
4505 __ Daddu(a0, a0, at);
4506 break;
4507 case NONE:
4508 __ sd(a1, MemOperand(sp, 0 * kPointerSize));
4509 __ li(a0, Operand(3));
4510 break;
4511 case ONE:
4512 __ sd(a1, MemOperand(sp, 1 * kPointerSize));
4513 __ li(a0, Operand(4));
4514 break;
4515 }
4516 __ Push(a3, a2);
4517 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004518}
4519
4520
4521void InternalArrayConstructorStub::GenerateCase(
4522 MacroAssembler* masm, ElementsKind kind) {
4523
4524 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
4525 __ TailCallStub(&stub0, lo, a0, Operand(1));
4526
4527 InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
4528 __ TailCallStub(&stubN, hi, a0, Operand(1));
4529
4530 if (IsFastPackedElementsKind(kind)) {
4531    // We might need to create a holey array;
4532    // look at the first argument.
4533 __ ld(at, MemOperand(sp, 0));
4534
4535 InternalArraySingleArgumentConstructorStub
4536 stub1_holey(isolate(), GetHoleyElementsKind(kind));
4537 __ TailCallStub(&stub1_holey, ne, at, Operand(zero_reg));
4538 }
4539
4540 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
4541 __ TailCallStub(&stub1);
4542}
4543
4544
4545void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
4546 // ----------- S t a t e -------------
4547 // -- a0 : argc
4548 // -- a1 : constructor
4549 // -- sp[0] : return address
4550 // -- sp[4] : last argument
4551 // -----------------------------------
4552
4553 if (FLAG_debug_code) {
4554 // The array construct code is only set for the global and natives
4555 // builtin Array functions which always have maps.
4556
4557 // Initial map for the builtin Array function should be a map.
4558 __ ld(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
4559    // The following Smi test will catch both a NULL and a Smi.
4560 __ SmiTst(a3, at);
4561 __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
4562 at, Operand(zero_reg));
4563 __ GetObjectType(a3, a3, a4);
4564 __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
4565 a4, Operand(MAP_TYPE));
4566 }
4567
4568 // Figure out the right elements kind.
4569 __ ld(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
4570
4571 // Load the map's "bit field 2" into a3. We only need the first byte,
4572 // but the following bit field extraction takes care of that anyway.
4573 __ lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset));
4574 // Retrieve elements_kind from bit field 2.
4575 __ DecodeField<Map::ElementsKindBits>(a3);
4576
4577 if (FLAG_debug_code) {
4578 Label done;
4579 __ Branch(&done, eq, a3, Operand(FAST_ELEMENTS));
4580 __ Assert(
4581 eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray,
4582 a3, Operand(FAST_HOLEY_ELEMENTS));
4583 __ bind(&done);
4584 }
4585
4586 Label fast_elements_case;
4587 __ Branch(&fast_elements_case, eq, a3, Operand(FAST_ELEMENTS));
4588 GenerateCase(masm, FAST_HOLEY_ELEMENTS);
4589
4590 __ bind(&fast_elements_case);
4591 GenerateCase(masm, FAST_ELEMENTS);
4592}
4593
4594
Ben Murdoch097c5b22016-05-18 11:27:45 +01004595void FastNewObjectStub::Generate(MacroAssembler* masm) {
4596 // ----------- S t a t e -------------
4597 // -- a1 : target
4598 // -- a3 : new target
4599 // -- cp : context
4600 // -- ra : return address
4601 // -----------------------------------
4602 __ AssertFunction(a1);
4603 __ AssertReceiver(a3);
4604
4605 // Verify that the new target is a JSFunction.
4606 Label new_object;
4607 __ GetObjectType(a3, a2, a2);
4608 __ Branch(&new_object, ne, a2, Operand(JS_FUNCTION_TYPE));
4609
4610 // Load the initial map and verify that it's in fact a map.
4611 __ ld(a2, FieldMemOperand(a3, JSFunction::kPrototypeOrInitialMapOffset));
4612 __ JumpIfSmi(a2, &new_object);
4613 __ GetObjectType(a2, a0, a0);
4614 __ Branch(&new_object, ne, a0, Operand(MAP_TYPE));
4615
4616 // Fall back to runtime if the target differs from the new target's
4617 // initial map constructor.
4618 __ ld(a0, FieldMemOperand(a2, Map::kConstructorOrBackPointerOffset));
4619 __ Branch(&new_object, ne, a0, Operand(a1));
4620
4621 // Allocate the JSObject on the heap.
4622 Label allocate, done_allocate;
4623 __ lbu(a4, FieldMemOperand(a2, Map::kInstanceSizeOffset));
4624 __ Allocate(a4, v0, a5, a0, &allocate, SIZE_IN_WORDS);
4625 __ bind(&done_allocate);
4626
4627 // Initialize the JSObject fields.
Ben Murdochc5610432016-08-08 18:44:38 +01004628 __ sd(a2, FieldMemOperand(v0, JSObject::kMapOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004629 __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
Ben Murdochc5610432016-08-08 18:44:38 +01004630 __ sd(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
4631 __ sd(a3, FieldMemOperand(v0, JSObject::kElementsOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004632 STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
Ben Murdochc5610432016-08-08 18:44:38 +01004633 __ Daddu(a1, v0, Operand(JSObject::kHeaderSize - kHeapObjectTag));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004634
4635 // ----------- S t a t e -------------
Ben Murdochc5610432016-08-08 18:44:38 +01004636 // -- v0 : result (tagged)
Ben Murdoch097c5b22016-05-18 11:27:45 +01004637 // -- a1 : result fields (untagged)
4638 // -- a5 : result end (untagged)
4639 // -- a2 : initial map
4640 // -- cp : context
4641 // -- ra : return address
4642 // -----------------------------------
4643
4644 // Perform in-object slack tracking if requested.
4645 Label slack_tracking;
4646 STATIC_ASSERT(Map::kNoSlackTracking == 0);
4647 __ lwu(a3, FieldMemOperand(a2, Map::kBitField3Offset));
4648 __ And(at, a3, Operand(Map::ConstructionCounter::kMask));
4649 __ Branch(USE_DELAY_SLOT, &slack_tracking, ne, at, Operand(zero_reg));
4650 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); // In delay slot.
4651 {
4652 // Initialize all in-object fields with undefined.
4653 __ InitializeFieldsWithFiller(a1, a5, a0);
Ben Murdochc5610432016-08-08 18:44:38 +01004654 __ Ret();
Ben Murdoch097c5b22016-05-18 11:27:45 +01004655 }
4656 __ bind(&slack_tracking);
4657 {
4658 // Decrease generous allocation count.
4659 STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
4660 __ Subu(a3, a3, Operand(1 << Map::ConstructionCounter::kShift));
4661 __ sw(a3, FieldMemOperand(a2, Map::kBitField3Offset));
4662
4663 // Initialize the in-object fields with undefined.
4664 __ lbu(a4, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
4665 __ dsll(a4, a4, kPointerSizeLog2);
4666 __ Dsubu(a4, a5, a4);
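    // a4 now marks the boundary between used in-object fields (filled with
    // undefined) and the remaining slack (filled with the one-pointer filler).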
4667 __ InitializeFieldsWithFiller(a1, a4, a0);
4668
4669 // Initialize the remaining (reserved) fields with one pointer filler map.
4670 __ LoadRoot(a0, Heap::kOnePointerFillerMapRootIndex);
4671 __ InitializeFieldsWithFiller(a1, a5, a0);
4672
4673 // Check if we can finalize the instance size.
4674 Label finalize;
4675 STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
4676 __ And(a3, a3, Operand(Map::ConstructionCounter::kMask));
Ben Murdochc5610432016-08-08 18:44:38 +01004677 __ Branch(&finalize, eq, a3, Operand(zero_reg));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004678 __ Ret();
4679
4680 // Finalize the instance size.
4681 __ bind(&finalize);
4682 {
4683 FrameScope scope(masm, StackFrame::INTERNAL);
4684 __ Push(v0, a2);
4685 __ CallRuntime(Runtime::kFinalizeInstanceSize);
4686 __ Pop(v0);
4687 }
4688 __ Ret();
4689 }
4690
4691 // Fall back to %AllocateInNewSpace.
4692 __ bind(&allocate);
4693 {
4694 FrameScope scope(masm, StackFrame::INTERNAL);
4695 STATIC_ASSERT(kSmiTag == 0);
4696 STATIC_ASSERT(kSmiTagSize == 1);
4697 __ dsll(a4, a4, kPointerSizeLog2 + kSmiShiftSize + kSmiTagSize);
4698 __ SmiTag(a4);
4699 __ Push(a2, a4);
4700 __ CallRuntime(Runtime::kAllocateInNewSpace);
4701 __ Pop(a2);
4702 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01004703 __ lbu(a5, FieldMemOperand(a2, Map::kInstanceSizeOffset));
4704 __ Dlsa(a5, v0, a5, kPointerSizeLog2);
Ben Murdochc5610432016-08-08 18:44:38 +01004705 STATIC_ASSERT(kHeapObjectTag == 1);
4706 __ Dsubu(a5, a5, Operand(kHeapObjectTag));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004707 __ jmp(&done_allocate);
4708
4709 // Fall back to %NewObject.
4710 __ bind(&new_object);
4711 __ Push(a1, a3);
4712 __ TailCallRuntime(Runtime::kNewObject);
4713}
4714
4715
4716void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
4717 // ----------- S t a t e -------------
4718 // -- a1 : function
4719 // -- cp : context
4720 // -- fp : frame pointer
4721 // -- ra : return address
4722 // -----------------------------------
4723 __ AssertFunction(a1);
4724
Ben Murdochc5610432016-08-08 18:44:38 +01004725 // Make a2 point to the JavaScript frame.
4726 __ mov(a2, fp);
4727 if (skip_stub_frame()) {
4728 // For Ignition we need to skip the handler/stub frame to reach the
4729 // JavaScript frame for the function.
Ben Murdoch097c5b22016-05-18 11:27:45 +01004730 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01004731 }
4732 if (FLAG_debug_code) {
4733 Label ok;
Ben Murdochda12d292016-06-02 14:46:10 +01004734 __ ld(a3, MemOperand(a2, StandardFrameConstants::kFunctionOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01004735 __ Branch(&ok, eq, a1, Operand(a3));
4736 __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
4737 __ bind(&ok);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004738 }
4739
4740 // Check if we have rest parameters (only possible if we have an
4741 // arguments adaptor frame below the function frame).
4742 Label no_rest_parameters;
4743 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01004744 __ ld(a3, MemOperand(a2, CommonFrameConstants::kContextOrFrameTypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004745 __ Branch(&no_rest_parameters, ne, a3,
4746 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4747
4748 // Check if the arguments adaptor frame contains more arguments than
4749 // specified by the function's internal formal parameter count.
4750 Label rest_parameters;
4751 __ SmiLoadUntag(
4752 a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4753 __ ld(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
4754 __ lw(a1,
4755 FieldMemOperand(a1, SharedFunctionInfo::kFormalParameterCountOffset));
4756 __ Dsubu(a0, a0, Operand(a1));
4757 __ Branch(&rest_parameters, gt, a0, Operand(zero_reg));
4758
4759 // Return an empty rest parameter array.
4760 __ bind(&no_rest_parameters);
4761 {
4762 // ----------- S t a t e -------------
4763 // -- cp : context
4764 // -- ra : return address
4765 // -----------------------------------
4766
4767 // Allocate an empty rest parameter array.
4768 Label allocate, done_allocate;
Ben Murdochc5610432016-08-08 18:44:38 +01004769 __ Allocate(JSArray::kSize, v0, a0, a1, &allocate, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004770 __ bind(&done_allocate);
4771
4772 // Setup the rest parameter array in v0.
4773 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, a1);
4774 __ sd(a1, FieldMemOperand(v0, JSArray::kMapOffset));
4775 __ LoadRoot(a1, Heap::kEmptyFixedArrayRootIndex);
4776 __ sd(a1, FieldMemOperand(v0, JSArray::kPropertiesOffset));
4777 __ sd(a1, FieldMemOperand(v0, JSArray::kElementsOffset));
4778 __ Move(a1, Smi::FromInt(0));
4779 __ Ret(USE_DELAY_SLOT);
4780 __ sd(a1, FieldMemOperand(v0, JSArray::kLengthOffset)); // In delay slot
4781 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
4782
4783 // Fall back to %AllocateInNewSpace.
4784 __ bind(&allocate);
4785 {
4786 FrameScope scope(masm, StackFrame::INTERNAL);
4787 __ Push(Smi::FromInt(JSArray::kSize));
4788 __ CallRuntime(Runtime::kAllocateInNewSpace);
4789 }
4790 __ jmp(&done_allocate);
4791 }
4792
4793 __ bind(&rest_parameters);
4794 {
4795    // Compute the pointer to the first rest parameter (skipping the receiver).
4796 __ Dlsa(a2, a2, a0, kPointerSizeLog2);
4797 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
4798 1 * kPointerSize));
4799
4800 // ----------- S t a t e -------------
4801 // -- cp : context
4802 // -- a0 : number of rest parameters
4803 // -- a2 : pointer to first rest parameters
4804 // -- ra : return address
4805 // -----------------------------------
4806
4807 // Allocate space for the rest parameter array plus the backing store.
4808 Label allocate, done_allocate;
4809 __ li(a1, Operand(JSArray::kSize + FixedArray::kHeaderSize));
4810 __ Dlsa(a1, a1, a0, kPointerSizeLog2);
Ben Murdochc5610432016-08-08 18:44:38 +01004811 __ Allocate(a1, v0, a3, a4, &allocate, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004812 __ bind(&done_allocate);
4813
4814 // Compute arguments.length in a4.
4815 __ SmiTag(a4, a0);
4816
4817 // Setup the elements array in v0.
4818 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
4819 __ sd(at, FieldMemOperand(v0, FixedArray::kMapOffset));
4820 __ sd(a4, FieldMemOperand(v0, FixedArray::kLengthOffset));
4821 __ Daddu(a3, v0, Operand(FixedArray::kHeaderSize));
4822 {
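      // Copy the rest parameters into the elements array: a2 walks down the
      // stack (the arguments are stored in reverse order) while a3 advances
      // through the FixedArray.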
4823 Label loop, done_loop;
4824 __ Dlsa(a1, a3, a0, kPointerSizeLog2);
4825 __ bind(&loop);
4826 __ Branch(&done_loop, eq, a1, Operand(a3));
4827 __ ld(at, MemOperand(a2, 0 * kPointerSize));
4828 __ sd(at, FieldMemOperand(a3, 0 * kPointerSize));
4829 __ Dsubu(a2, a2, Operand(1 * kPointerSize));
4830 __ Daddu(a3, a3, Operand(1 * kPointerSize));
4831 __ Branch(&loop);
4832 __ bind(&done_loop);
4833 }
4834
4835 // Setup the rest parameter array in a3.
4836 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, at);
4837 __ sd(at, FieldMemOperand(a3, JSArray::kMapOffset));
4838 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
4839 __ sd(at, FieldMemOperand(a3, JSArray::kPropertiesOffset));
4840 __ sd(v0, FieldMemOperand(a3, JSArray::kElementsOffset));
4841 __ sd(a4, FieldMemOperand(a3, JSArray::kLengthOffset));
4842 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
4843 __ Ret(USE_DELAY_SLOT);
4844 __ mov(v0, a3); // In delay slot
4845
4846 // Fall back to %AllocateInNewSpace.
4847 __ bind(&allocate);
4848 {
4849 FrameScope scope(masm, StackFrame::INTERNAL);
4850 __ SmiTag(a0);
4851 __ SmiTag(a1);
4852 __ Push(a0, a2, a1);
4853 __ CallRuntime(Runtime::kAllocateInNewSpace);
4854 __ Pop(a0, a2);
4855 __ SmiUntag(a0);
4856 }
4857 __ jmp(&done_allocate);
4858 }
4859}
4860
4861
4862void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
4863 // ----------- S t a t e -------------
4864 // -- a1 : function
4865 // -- cp : context
4866 // -- fp : frame pointer
4867 // -- ra : return address
4868 // -----------------------------------
4869 __ AssertFunction(a1);
4870
Ben Murdochc5610432016-08-08 18:44:38 +01004871 // Make t0 point to the JavaScript frame.
4872 __ mov(t0, fp);
4873 if (skip_stub_frame()) {
4874 // For Ignition we need to skip the handler/stub frame to reach the
4875 // JavaScript frame for the function.
4876 __ ld(t0, MemOperand(t0, StandardFrameConstants::kCallerFPOffset));
4877 }
4878 if (FLAG_debug_code) {
4879 Label ok;
4880 __ ld(a3, MemOperand(t0, StandardFrameConstants::kFunctionOffset));
4881 __ Branch(&ok, eq, a1, Operand(a3));
4882 __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
4883 __ bind(&ok);
4884 }
4885
Ben Murdoch097c5b22016-05-18 11:27:45 +01004886 // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
4887 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
4888 __ lw(a2,
4889 FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01004890 __ Lsa(a3, t0, a2, kPointerSizeLog2);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004891 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset));
4892 __ SmiTag(a2);
4893
4894 // a1 : function
4895 // a2 : number of parameters (tagged)
4896 // a3 : parameters pointer
Ben Murdochc5610432016-08-08 18:44:38 +01004897  // t0 : JavaScript frame pointer
Ben Murdoch097c5b22016-05-18 11:27:45 +01004898 // Registers used over whole function:
4899 // a5 : arguments count (tagged)
4900 // a6 : mapped parameter count (tagged)
4901
4902 // Check if the calling frame is an arguments adaptor frame.
4903 Label adaptor_frame, try_allocate, runtime;
Ben Murdochc5610432016-08-08 18:44:38 +01004904 __ ld(a4, MemOperand(t0, StandardFrameConstants::kCallerFPOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01004905 __ ld(a0, MemOperand(a4, CommonFrameConstants::kContextOrFrameTypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004906 __ Branch(&adaptor_frame, eq, a0,
4907 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4908
4909 // No adaptor, parameter count = argument count.
4910 __ mov(a5, a2);
4911 __ Branch(USE_DELAY_SLOT, &try_allocate);
4912 __ mov(a6, a2); // In delay slot.
4913
4914 // We have an adaptor frame. Patch the parameters pointer.
4915 __ bind(&adaptor_frame);
4916 __ ld(a5, MemOperand(a4, ArgumentsAdaptorFrameConstants::kLengthOffset));
4917 __ SmiScale(t2, a5, kPointerSizeLog2);
4918 __ Daddu(a4, a4, Operand(t2));
4919 __ Daddu(a3, a4, Operand(StandardFrameConstants::kCallerSPOffset));
4920
4921 // a5 = argument count (tagged)
4922 // a6 = parameter count (tagged)
4923 // Compute the mapped parameter count = min(a6, a5) in a6.
4924 __ mov(a6, a2);
4925 __ Branch(&try_allocate, le, a6, Operand(a5));
4926 __ mov(a6, a5);
4927
4928 __ bind(&try_allocate);
4929
4930 // Compute the sizes of backing store, parameter map, and arguments object.
4931 // 1. Parameter map, has 2 extra words containing context and backing store.
4932  // 1. Parameter map: has 2 extra words containing context and backing store.
4933 FixedArray::kHeaderSize + 2 * kPointerSize;
4934 // If there are no mapped parameters, we do not need the parameter_map.
4935 Label param_map_size;
4936 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
4937 __ Branch(USE_DELAY_SLOT, &param_map_size, eq, a6, Operand(zero_reg));
4938 __ mov(t1, zero_reg); // In delay slot: param map size = 0 when a6 == 0.
4939 __ SmiScale(t1, a6, kPointerSizeLog2);
4940 __ daddiu(t1, t1, kParameterMapHeaderSize);
4941 __ bind(&param_map_size);
4942
4943 // 2. Backing store.
4944 __ SmiScale(t2, a5, kPointerSizeLog2);
4945 __ Daddu(t1, t1, Operand(t2));
4946 __ Daddu(t1, t1, Operand(FixedArray::kHeaderSize));
4947
4948 // 3. Arguments object.
4949 __ Daddu(t1, t1, Operand(JSSloppyArgumentsObject::kSize));
4950
4951 // Do the allocation of all three objects in one go.
Ben Murdochc5610432016-08-08 18:44:38 +01004952 __ Allocate(t1, v0, t1, a4, &runtime, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004953
4954 // v0 = address of new object(s) (tagged)
4955 // a2 = argument count (smi-tagged)
4956 // Get the arguments boilerplate from the current native context into a4.
4957 const int kNormalOffset =
4958 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
4959 const int kAliasedOffset =
4960 Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
4961
4962 __ ld(a4, NativeContextMemOperand());
4963 Label skip2_ne, skip2_eq;
4964 __ Branch(&skip2_ne, ne, a6, Operand(zero_reg));
4965 __ ld(a4, MemOperand(a4, kNormalOffset));
4966 __ bind(&skip2_ne);
4967
4968 __ Branch(&skip2_eq, eq, a6, Operand(zero_reg));
4969 __ ld(a4, MemOperand(a4, kAliasedOffset));
4970 __ bind(&skip2_eq);
4971
4972 // v0 = address of new object (tagged)
4973 // a2 = argument count (smi-tagged)
4974 // a4 = address of arguments map (tagged)
4975 // a6 = mapped parameter count (tagged)
4976 __ sd(a4, FieldMemOperand(v0, JSObject::kMapOffset));
4977 __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex);
4978 __ sd(t1, FieldMemOperand(v0, JSObject::kPropertiesOffset));
4979 __ sd(t1, FieldMemOperand(v0, JSObject::kElementsOffset));
4980
4981 // Set up the callee in-object property.
4982 __ AssertNotSmi(a1);
4983 __ sd(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset));
4984
4985 // Use the length (smi tagged) and set that as an in-object property too.
4986 __ AssertSmi(a5);
4987 __ sd(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
4988
4989 // Set up the elements pointer in the allocated arguments object.
4990 // If we allocated a parameter map, a4 will point there, otherwise
4991 // it will point to the backing store.
4992 __ Daddu(a4, v0, Operand(JSSloppyArgumentsObject::kSize));
4993 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
4994
4995 // v0 = address of new object (tagged)
4996 // a2 = argument count (tagged)
4997 // a4 = address of parameter map or backing store (tagged)
4998 // a6 = mapped parameter count (tagged)
4999 // Initialize parameter map. If there are no mapped arguments, we're done.
5000 Label skip_parameter_map;
5001 Label skip3;
5002 __ Branch(&skip3, ne, a6, Operand(Smi::FromInt(0)));
5003 // Move backing store address to a1, because it is
5004 // expected there when filling in the unmapped arguments.
5005 __ mov(a1, a4);
5006 __ bind(&skip3);
5007
5008 __ Branch(&skip_parameter_map, eq, a6, Operand(Smi::FromInt(0)));
5009
5010 __ LoadRoot(a5, Heap::kSloppyArgumentsElementsMapRootIndex);
5011 __ sd(a5, FieldMemOperand(a4, FixedArray::kMapOffset));
5012 __ Daddu(a5, a6, Operand(Smi::FromInt(2)));
5013 __ sd(a5, FieldMemOperand(a4, FixedArray::kLengthOffset));
5014 __ sd(cp, FieldMemOperand(a4, FixedArray::kHeaderSize + 0 * kPointerSize));
5015 __ SmiScale(t2, a6, kPointerSizeLog2);
5016 __ Daddu(a5, a4, Operand(t2));
5017 __ Daddu(a5, a5, Operand(kParameterMapHeaderSize));
5018 __ sd(a5, FieldMemOperand(a4, FixedArray::kHeaderSize + 1 * kPointerSize));
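  // The first two slots of the parameter map now hold the context and the
  // address of the backing store.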
5019
5020 // Copy the parameter slots and the holes in the arguments.
5021 // We need to fill in mapped_parameter_count slots. They index the context,
5022 // where parameters are stored in reverse order, at
5023 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
5024  // The mapped parameters thus need to get indices
5025 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
5026 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
5027 // We loop from right to left.
5028 Label parameters_loop, parameters_test;
5029 __ mov(a5, a6);
5030 __ Daddu(t1, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
5031 __ Dsubu(t1, t1, Operand(a6));
5032 __ LoadRoot(a7, Heap::kTheHoleValueRootIndex);
5033 __ SmiScale(t2, a5, kPointerSizeLog2);
5034 __ Daddu(a1, a4, Operand(t2));
5035 __ Daddu(a1, a1, Operand(kParameterMapHeaderSize));
5036
5037 // a1 = address of backing store (tagged)
5038 // a4 = address of parameter map (tagged)
5039 // a0 = temporary scratch (a.o., for address calculation)
5040 // t1 = loop variable (tagged)
5041 // a7 = the hole value
5042 __ jmp(&parameters_test);
5043
5044 __ bind(&parameters_loop);
5045 __ Dsubu(a5, a5, Operand(Smi::FromInt(1)));
5046 __ SmiScale(a0, a5, kPointerSizeLog2);
5047 __ Daddu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
5048 __ Daddu(t2, a4, a0);
5049 __ sd(t1, MemOperand(t2));
5050 __ Dsubu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
5051 __ Daddu(t2, a1, a0);
5052 __ sd(a7, MemOperand(t2));
5053 __ Daddu(t1, t1, Operand(Smi::FromInt(1)));
5054 __ bind(&parameters_test);
5055 __ Branch(&parameters_loop, ne, a5, Operand(Smi::FromInt(0)));
5056
5057  // Restore a5 = argument count (tagged).
5058 __ ld(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
5059
5060 __ bind(&skip_parameter_map);
5061 // v0 = address of new object (tagged)
5062 // a1 = address of backing store (tagged)
5063 // a5 = argument count (tagged)
5064 // a6 = mapped parameter count (tagged)
5065 // t1 = scratch
5066 // Copy arguments header and remaining slots (if there are any).
5067 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
5068 __ sd(t1, FieldMemOperand(a1, FixedArray::kMapOffset));
5069 __ sd(a5, FieldMemOperand(a1, FixedArray::kLengthOffset));
5070
5071 Label arguments_loop, arguments_test;
5072 __ SmiScale(t2, a6, kPointerSizeLog2);
5073 __ Dsubu(a3, a3, Operand(t2));
5074 __ jmp(&arguments_test);
5075
5076 __ bind(&arguments_loop);
5077 __ Dsubu(a3, a3, Operand(kPointerSize));
5078 __ ld(a4, MemOperand(a3, 0));
5079 __ SmiScale(t2, a6, kPointerSizeLog2);
5080 __ Daddu(t1, a1, Operand(t2));
5081 __ sd(a4, FieldMemOperand(t1, FixedArray::kHeaderSize));
5082 __ Daddu(a6, a6, Operand(Smi::FromInt(1)));
5083
5084 __ bind(&arguments_test);
5085 __ Branch(&arguments_loop, lt, a6, Operand(a5));
5086
5087 // Return.
5088 __ Ret();
5089
5090 // Do the runtime call to allocate the arguments object.
5091 // a5 = argument count (tagged)
5092 __ bind(&runtime);
5093 __ Push(a1, a3, a5);
5094 __ TailCallRuntime(Runtime::kNewSloppyArguments);
5095}
5096
5097
5098void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
5099 // ----------- S t a t e -------------
5100 // -- a1 : function
5101 // -- cp : context
5102 // -- fp : frame pointer
5103 // -- ra : return address
5104 // -----------------------------------
5105 __ AssertFunction(a1);
5106
Ben Murdochc5610432016-08-08 18:44:38 +01005107 // Make a2 point to the JavaScript frame.
5108 __ mov(a2, fp);
5109 if (skip_stub_frame()) {
5110 // For Ignition we need to skip the handler/stub frame to reach the
5111 // JavaScript frame for the function.
Ben Murdoch097c5b22016-05-18 11:27:45 +01005112 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01005113 }
5114 if (FLAG_debug_code) {
5115 Label ok;
Ben Murdochda12d292016-06-02 14:46:10 +01005116 __ ld(a3, MemOperand(a2, StandardFrameConstants::kFunctionOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01005117 __ Branch(&ok, eq, a1, Operand(a3));
5118 __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
5119 __ bind(&ok);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005120 }
5121
5122 // Check if we have an arguments adaptor frame below the function frame.
5123 Label arguments_adaptor, arguments_done;
5124 __ ld(a3, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01005125 __ ld(a0, MemOperand(a3, CommonFrameConstants::kContextOrFrameTypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01005126 __ Branch(&arguments_adaptor, eq, a0,
5127 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
5128 {
5129 __ ld(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
5130 __ lw(a0,
5131 FieldMemOperand(a1, SharedFunctionInfo::kFormalParameterCountOffset));
5132 __ Dlsa(a2, a2, a0, kPointerSizeLog2);
5133 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
5134 1 * kPointerSize));
5135 }
5136 __ Branch(&arguments_done);
5137 __ bind(&arguments_adaptor);
5138 {
5139 __ SmiLoadUntag(
5140 a0, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
5141 __ Dlsa(a2, a3, a0, kPointerSizeLog2);
5142 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
5143 1 * kPointerSize));
5144 }
5145 __ bind(&arguments_done);
5146
5147 // ----------- S t a t e -------------
5148 // -- cp : context
5149  //  -- a0 : number of arguments
5150  //  -- a2 : pointer to first argument
5151 // -- ra : return address
5152 // -----------------------------------
5153
5154 // Allocate space for the rest parameter array plus the backing store.
5155 Label allocate, done_allocate;
5156 __ li(a1, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
5157 __ Dlsa(a1, a1, a0, kPointerSizeLog2);
Ben Murdochc5610432016-08-08 18:44:38 +01005158 __ Allocate(a1, v0, a3, a4, &allocate, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005159 __ bind(&done_allocate);
5160
5161 // Compute arguments.length in a4.
5162 __ SmiTag(a4, a0);
5163
5164 // Setup the elements array in v0.
5165 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
5166 __ sd(at, FieldMemOperand(v0, FixedArray::kMapOffset));
5167 __ sd(a4, FieldMemOperand(v0, FixedArray::kLengthOffset));
5168 __ Daddu(a3, v0, Operand(FixedArray::kHeaderSize));
5169 {
5170 Label loop, done_loop;
5171 __ Dlsa(a1, a3, a0, kPointerSizeLog2);
5172 __ bind(&loop);
5173 __ Branch(&done_loop, eq, a1, Operand(a3));
5174 __ ld(at, MemOperand(a2, 0 * kPointerSize));
5175 __ sd(at, FieldMemOperand(a3, 0 * kPointerSize));
5176 __ Dsubu(a2, a2, Operand(1 * kPointerSize));
5177 __ Daddu(a3, a3, Operand(1 * kPointerSize));
5178 __ Branch(&loop);
5179 __ bind(&done_loop);
5180 }
5181
5182 // Setup the strict arguments object in a3.
5183 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, at);
5184 __ sd(at, FieldMemOperand(a3, JSStrictArgumentsObject::kMapOffset));
5185 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
5186 __ sd(at, FieldMemOperand(a3, JSStrictArgumentsObject::kPropertiesOffset));
5187 __ sd(v0, FieldMemOperand(a3, JSStrictArgumentsObject::kElementsOffset));
5188 __ sd(a4, FieldMemOperand(a3, JSStrictArgumentsObject::kLengthOffset));
5189 STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
5190 __ Ret(USE_DELAY_SLOT);
5191 __ mov(v0, a3); // In delay slot
5192
5193 // Fall back to %AllocateInNewSpace.
5194 __ bind(&allocate);
5195 {
5196 FrameScope scope(masm, StackFrame::INTERNAL);
5197 __ SmiTag(a0);
5198 __ SmiTag(a1);
5199 __ Push(a0, a2, a1);
5200 __ CallRuntime(Runtime::kAllocateInNewSpace);
5201 __ Pop(a0, a2);
5202 __ SmiUntag(a0);
5203 }
5204 __ jmp(&done_allocate);
5205}
5206
5207
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005208void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
5209 Register context_reg = cp;
5210 Register slot_reg = a2;
5211 Register result_reg = v0;
5212 Label slow_case;
5213
5214 // Go up context chain to the script context.
5215 for (int i = 0; i < depth(); ++i) {
5216 __ ld(result_reg, ContextMemOperand(context_reg, Context::PREVIOUS_INDEX));
5217 context_reg = result_reg;
5218 }
5219
5220 // Load the PropertyCell value at the specified slot.
Ben Murdoch097c5b22016-05-18 11:27:45 +01005221 __ Dlsa(at, context_reg, slot_reg, kPointerSizeLog2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005222 __ ld(result_reg, ContextMemOperand(at, 0));
5223 __ ld(result_reg, FieldMemOperand(result_reg, PropertyCell::kValueOffset));
5224
5225 // Check that value is not the_hole.
5226 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
5227 __ Branch(&slow_case, eq, result_reg, Operand(at));
5228 __ Ret();
5229
5230 // Fallback to the runtime.
5231 __ bind(&slow_case);
5232 __ SmiTag(slot_reg);
5233 __ Push(slot_reg);
5234 __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
5235}
5236
5237
5238void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
5239 Register context_reg = cp;
5240 Register slot_reg = a2;
5241 Register value_reg = a0;
5242 Register cell_reg = a4;
5243 Register cell_value_reg = a5;
5244 Register cell_details_reg = a6;
5245 Label fast_heapobject_case, fast_smi_case, slow_case;
5246
5247 if (FLAG_debug_code) {
5248 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
5249 __ Check(ne, kUnexpectedValue, value_reg, Operand(at));
5250 }
5251
5252 // Go up context chain to the script context.
5253 for (int i = 0; i < depth(); ++i) {
5254 __ ld(cell_reg, ContextMemOperand(context_reg, Context::PREVIOUS_INDEX));
5255 context_reg = cell_reg;
5256 }
5257
5258 // Load the PropertyCell at the specified slot.
Ben Murdoch097c5b22016-05-18 11:27:45 +01005259 __ Dlsa(at, context_reg, slot_reg, kPointerSizeLog2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005260 __ ld(cell_reg, ContextMemOperand(at, 0));
5261
5262 // Load PropertyDetails for the cell (actually only the cell_type and kind).
5263 __ ld(cell_details_reg,
5264 FieldMemOperand(cell_reg, PropertyCell::kDetailsOffset));
5265 __ SmiUntag(cell_details_reg);
5266 __ And(cell_details_reg, cell_details_reg,
5267 PropertyDetails::PropertyCellTypeField::kMask |
5268 PropertyDetails::KindField::kMask |
5269 PropertyDetails::kAttributesReadOnlyMask);
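  // Only the cell type, the property kind and the read-only attribute are
  // relevant below; all other detail bits have been masked out.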
5270
5271 // Check if PropertyCell holds mutable data.
5272 Label not_mutable_data;
5273 __ Branch(&not_mutable_data, ne, cell_details_reg,
5274 Operand(PropertyDetails::PropertyCellTypeField::encode(
5275 PropertyCellType::kMutable) |
5276 PropertyDetails::KindField::encode(kData)));
5277 __ JumpIfSmi(value_reg, &fast_smi_case);
5278 __ bind(&fast_heapobject_case);
5279 __ sd(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
5280 __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
5281 cell_details_reg, kRAHasNotBeenSaved, kDontSaveFPRegs,
5282 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
5283 // RecordWriteField clobbers the value register, so we need to reload.
5284 __ Ret(USE_DELAY_SLOT);
5285 __ ld(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
5286 __ bind(&not_mutable_data);
5287
5288 // Check if PropertyCell value matches the new value (relevant for Constant,
5289 // ConstantType and Undefined cells).
5290 Label not_same_value;
5291 __ ld(cell_value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
5292 __ Branch(&not_same_value, ne, value_reg, Operand(cell_value_reg));
5293 // Make sure the PropertyCell is not marked READ_ONLY.
5294 __ And(at, cell_details_reg, PropertyDetails::kAttributesReadOnlyMask);
5295 __ Branch(&slow_case, ne, at, Operand(zero_reg));
5296 if (FLAG_debug_code) {
5297 Label done;
5298 // This can only be true for Constant, ConstantType and Undefined cells,
5299 // because we never store the_hole via this stub.
5300 __ Branch(&done, eq, cell_details_reg,
5301 Operand(PropertyDetails::PropertyCellTypeField::encode(
5302 PropertyCellType::kConstant) |
5303 PropertyDetails::KindField::encode(kData)));
5304 __ Branch(&done, eq, cell_details_reg,
5305 Operand(PropertyDetails::PropertyCellTypeField::encode(
5306 PropertyCellType::kConstantType) |
5307 PropertyDetails::KindField::encode(kData)));
5308 __ Check(eq, kUnexpectedValue, cell_details_reg,
5309 Operand(PropertyDetails::PropertyCellTypeField::encode(
5310 PropertyCellType::kUndefined) |
5311 PropertyDetails::KindField::encode(kData)));
5312 __ bind(&done);
5313 }
5314 __ Ret();
5315 __ bind(&not_same_value);
5316
5317 // Check if PropertyCell contains data with constant type (and is not
5318 // READ_ONLY).
5319 __ Branch(&slow_case, ne, cell_details_reg,
5320 Operand(PropertyDetails::PropertyCellTypeField::encode(
5321 PropertyCellType::kConstantType) |
5322 PropertyDetails::KindField::encode(kData)));
5323
5324 // Now either both old and new values must be SMIs or both must be heap
5325 // objects with same map.
5326 Label value_is_heap_object;
5327 __ JumpIfNotSmi(value_reg, &value_is_heap_object);
5328 __ JumpIfNotSmi(cell_value_reg, &slow_case);
5329 // Old and new values are SMIs, no need for a write barrier here.
5330 __ bind(&fast_smi_case);
5331 __ Ret(USE_DELAY_SLOT);
5332 __ sd(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
5333 __ bind(&value_is_heap_object);
5334 __ JumpIfSmi(cell_value_reg, &slow_case);
5335 Register cell_value_map_reg = cell_value_reg;
5336 __ ld(cell_value_map_reg,
5337 FieldMemOperand(cell_value_reg, HeapObject::kMapOffset));
5338 __ Branch(&fast_heapobject_case, eq, cell_value_map_reg,
5339 FieldMemOperand(value_reg, HeapObject::kMapOffset));
5340
5341 // Fallback to the runtime.
5342 __ bind(&slow_case);
5343 __ SmiTag(slot_reg);
5344 __ Push(slot_reg, value_reg);
5345 __ TailCallRuntime(is_strict(language_mode())
5346 ? Runtime::kStoreGlobalViaContext_Strict
5347 : Runtime::kStoreGlobalViaContext_Sloppy);
5348}
5349
5350
5351static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
5352 int64_t offset = (ref0.address() - ref1.address());
5353 DCHECK(static_cast<int>(offset) == offset);
5354 return static_cast<int>(offset);
5355}
5356
5357
5358// Calls an API function. Allocates a HandleScope, extracts the returned
5359// value from the handle, and propagates exceptions. Restores the context.
5360// stack_space - space to be unwound on exit (includes the call JS arguments
5361// space and the additional space allocated for the fast call).
5362static void CallApiFunctionAndReturn(
5363 MacroAssembler* masm, Register function_address,
5364 ExternalReference thunk_ref, int stack_space, int32_t stack_space_offset,
5365 MemOperand return_value_operand, MemOperand* context_restore_operand) {
5366 Isolate* isolate = masm->isolate();
5367 ExternalReference next_address =
5368 ExternalReference::handle_scope_next_address(isolate);
5369 const int kNextOffset = 0;
5370 const int kLimitOffset = AddressOffset(
5371 ExternalReference::handle_scope_limit_address(isolate), next_address);
5372 const int kLevelOffset = AddressOffset(
5373 ExternalReference::handle_scope_level_address(isolate), next_address);
5374
5375 DCHECK(function_address.is(a1) || function_address.is(a2));
5376
5377 Label profiler_disabled;
5378 Label end_profiler_check;
5379 __ li(t9, Operand(ExternalReference::is_profiling_address(isolate)));
5380 __ lb(t9, MemOperand(t9, 0));
5381 __ Branch(&profiler_disabled, eq, t9, Operand(zero_reg));
5382
5383 // Additional parameter is the address of the actual callback.
5384 __ li(t9, Operand(thunk_ref));
5385 __ jmp(&end_profiler_check);
5386
5387 __ bind(&profiler_disabled);
5388 __ mov(t9, function_address);
5389 __ bind(&end_profiler_check);
5390
5391 // Allocate HandleScope in callee-save registers.
5392 __ li(s3, Operand(next_address));
5393 __ ld(s0, MemOperand(s3, kNextOffset));
5394 __ ld(s1, MemOperand(s3, kLimitOffset));
5395 __ lw(s2, MemOperand(s3, kLevelOffset));
5396 __ Addu(s2, s2, Operand(1));
5397 __ sw(s2, MemOperand(s3, kLevelOffset));
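  // s0 (next), s1 (limit) and s2 (level) are callee-saved, so the HandleScope
  // state cached here survives the C call and can be checked and restored
  // afterwards.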
5398
5399 if (FLAG_log_timer_events) {
5400 FrameScope frame(masm, StackFrame::MANUAL);
5401 __ PushSafepointRegisters();
5402 __ PrepareCallCFunction(1, a0);
5403 __ li(a0, Operand(ExternalReference::isolate_address(isolate)));
5404 __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
5405 1);
5406 __ PopSafepointRegisters();
5407 }
5408
5409 // Native call returns to the DirectCEntry stub which redirects to the
5410 // return address pushed on stack (could have moved after GC).
5411 // DirectCEntry stub itself is generated early and never moves.
5412 DirectCEntryStub stub(isolate);
5413 stub.GenerateCall(masm, t9);
5414
5415 if (FLAG_log_timer_events) {
5416 FrameScope frame(masm, StackFrame::MANUAL);
5417 __ PushSafepointRegisters();
5418 __ PrepareCallCFunction(1, a0);
5419 __ li(a0, Operand(ExternalReference::isolate_address(isolate)));
5420 __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
5421 1);
5422 __ PopSafepointRegisters();
5423 }
5424
5425 Label promote_scheduled_exception;
5426 Label delete_allocated_handles;
5427 Label leave_exit_frame;
5428 Label return_value_loaded;
5429
5430 // Load value from ReturnValue.
5431 __ ld(v0, return_value_operand);
5432 __ bind(&return_value_loaded);
5433
5434 // No more valid handles (the result handle was the last one). Restore
5435 // previous handle scope.
5436 __ sd(s0, MemOperand(s3, kNextOffset));
5437 if (__ emit_debug_code()) {
5438 __ lw(a1, MemOperand(s3, kLevelOffset));
5439 __ Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2));
5440 }
5441 __ Subu(s2, s2, Operand(1));
5442 __ sw(s2, MemOperand(s3, kLevelOffset));
5443 __ ld(at, MemOperand(s3, kLimitOffset));
5444 __ Branch(&delete_allocated_handles, ne, s1, Operand(at));
5445
5446 // Leave the API exit frame.
5447 __ bind(&leave_exit_frame);
5448
5449 bool restore_context = context_restore_operand != NULL;
5450 if (restore_context) {
5451 __ ld(cp, *context_restore_operand);
5452 }
5453 if (stack_space_offset != kInvalidStackOffset) {
5454 DCHECK(kCArgsSlotsSize == 0);
5455 __ ld(s0, MemOperand(sp, stack_space_offset));
5456 } else {
5457 __ li(s0, Operand(stack_space));
5458 }
5459 __ LeaveExitFrame(false, s0, !restore_context, NO_EMIT_RETURN,
5460 stack_space_offset != kInvalidStackOffset);
5461
5462 // Check if the function scheduled an exception.
5463 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
5464 __ li(at, Operand(ExternalReference::scheduled_exception_address(isolate)));
5465 __ ld(a5, MemOperand(at));
5466 __ Branch(&promote_scheduled_exception, ne, a4, Operand(a5));
5467
5468 __ Ret();
5469
5470 // Re-throw by promoting a scheduled exception.
5471 __ bind(&promote_scheduled_exception);
5472 __ TailCallRuntime(Runtime::kPromoteScheduledException);
5473
5474 // HandleScope limit has changed. Delete allocated extensions.
5475 __ bind(&delete_allocated_handles);
5476 __ sd(s1, MemOperand(s3, kLimitOffset));
5477 __ mov(s0, v0);
5478 __ mov(a0, v0);
5479 __ PrepareCallCFunction(1, s1);
5480 __ li(a0, Operand(ExternalReference::isolate_address(isolate)));
5481 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate),
5482 1);
5483 __ mov(v0, s0);
5484 __ jmp(&leave_exit_frame);
5485}
5486
Ben Murdochda12d292016-06-02 14:46:10 +01005487void CallApiCallbackStub::Generate(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005488 // ----------- S t a t e -------------
5489 // -- a0 : callee
5490 // -- a4 : call_data
5491 // -- a2 : holder
5492 // -- a1 : api_function_address
5493 // -- cp : context
5494 // --
5495 // -- sp[0] : last argument
5496 // -- ...
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005497 // -- sp[(argc - 1)* 8] : first argument
5498 // -- sp[argc * 8] : receiver
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005499 // -----------------------------------

  Register callee = a0;
  Register call_data = a4;
  Register holder = a2;
  Register api_function_address = a1;
  Register context = cp;

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kNewTargetIndex == 7);
  STATIC_ASSERT(FCA::kArgsLength == 8);

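  // The pushes below build the implicit FunctionCallbackArguments array so
  // that, together with the receiver and arguments already on the stack, the
  // layout (derived from the asserts above) becomes:
  //   sp[0 * 8] : holder
  //   sp[1 * 8] : isolate
  //   sp[2 * 8] : default return value (undefined)
  //   sp[3 * 8] : return value (undefined)
  //   sp[4 * 8] : call_data
  //   sp[5 * 8] : callee
  //   sp[6 * 8] : context
  //   sp[7 * 8] : new.target (undefined)
  //   sp[8 * 8] : last JS argument
  //   ...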
  // new target
  __ PushRoot(Heap::kUndefinedValueRootIndex);

  // Save context, callee and call data.
  __ Push(context, callee, call_data);
  if (!is_lazy()) {
    // Load context from callee.
    __ ld(context, FieldMemOperand(callee, JSFunction::kContextOffset));
  }

  Register scratch = call_data;
  if (!call_data_undefined()) {
    __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  }
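  // scratch aliases the call_data register, whose value was already pushed
  // above. If call_data is known to be undefined the register already holds
  // undefined; otherwise load undefined explicitly so that the two pushes
  // below fill the return value slots with undefined.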
  // Push return value and default return value.
  __ Push(scratch, scratch);
  __ li(scratch, Operand(ExternalReference::isolate_address(masm->isolate())));
  // Push isolate and holder.
  __ Push(scratch, holder);

  // Prepare arguments.
  __ mov(scratch, sp);

  // Allocate the v8::FunctionCallbackInfo structure in the arguments' space,
  // since it's not controlled by GC.
  const int kApiStackSpace = 3;

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

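  // The three slots reserved in the exit frame hold the FunctionCallbackInfo
  // fields written below: implicit_args_, values_ and length_.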
  DCHECK(!api_function_address.is(a0) && !scratch.is(a0));
  // a0 = FunctionCallbackInfo&
  // The FunctionCallbackInfo struct is located after the return address slot.
  __ Daddu(a0, sp, Operand(1 * kPointerSize));
  // FunctionCallbackInfo::implicit_args_
  __ sd(scratch, MemOperand(a0, 0 * kPointerSize));
  // FunctionCallbackInfo::values_
  __ Daddu(at, scratch,
           Operand((FCA::kArgsLength - 1 + argc()) * kPointerSize));
  __ sd(at, MemOperand(a0, 1 * kPointerSize));
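  // values_ now points at the first JS argument, which lies
  // FCA::kArgsLength - 1 + argc() slots above implicit_args_, immediately
  // below the receiver.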
  // FunctionCallbackInfo::length_ = argc
  // Stored as int field, 32-bit integers within struct on stack always left
  // justified by n64 ABI.
  __ li(at, Operand(argc()));
  __ sw(at, MemOperand(a0, 2 * kPointerSize));

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  AllowExternalCallThatCantCauseGC scope(masm);
  MemOperand context_restore_operand(
      fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
  // Stores return the first JS argument.
  int return_value_offset = 0;
  if (is_store()) {
    return_value_offset = 2 + FCA::kArgsLength;
  } else {
    return_value_offset = 2 + FCA::kReturnValueOffset;
  }
  MemOperand return_value_operand(fp, return_value_offset * kPointerSize);
  int stack_space = 0;
  int32_t stack_space_offset = 3 * kPointerSize;
  stack_space = argc() + FCA::kArgsLength + 1;
  // TODO(adamk): Why are we clobbering this immediately?
  stack_space_offset = kInvalidStackOffset;
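  // With stack_space_offset invalidated, CallApiFunctionAndReturn drops a
  // constant argc() + FCA::kArgsLength + 1 (receiver) slots when leaving the
  // exit frame instead of reading the count from the stack.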
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, stack_space,
                           stack_space_offset, return_value_operand,
                           &context_restore_operand);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // Build the v8::PropertyCallbackInfo::args_ array on the stack and push the
  // property name below the exit frame to make the GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = a4;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  Register api_function_address = a2;

  // Here and below +1 is for name() pushed after the args_ array.
  typedef PropertyCallbackArguments PCA;
  __ Dsubu(sp, sp, (PCA::kArgsLength + 1) * kPointerSize);
  __ sd(receiver, MemOperand(sp, (PCA::kThisIndex + 1) * kPointerSize));
  __ ld(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset));
  __ sd(scratch, MemOperand(sp, (PCA::kDataIndex + 1) * kPointerSize));
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ sd(scratch, MemOperand(sp, (PCA::kReturnValueOffset + 1) * kPointerSize));
  __ sd(scratch, MemOperand(sp, (PCA::kReturnValueDefaultValueIndex + 1) *
                                    kPointerSize));
  __ li(scratch, Operand(ExternalReference::isolate_address(isolate())));
  __ sd(scratch, MemOperand(sp, (PCA::kIsolateIndex + 1) * kPointerSize));
  __ sd(holder, MemOperand(sp, (PCA::kHolderIndex + 1) * kPointerSize));
  // should_throw_on_error -> false
  DCHECK(Smi::FromInt(0) == nullptr);
  __ sd(zero_reg,
        MemOperand(sp, (PCA::kShouldThrowOnErrorIndex + 1) * kPointerSize));
  __ ld(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));
  __ sd(scratch, MemOperand(sp, 0 * kPointerSize));
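  // The resulting layout below the exit frame (from the asserts above) is:
  //   sp[0 * 8] : property name
  //   sp[1 * 8] : should_throw_on_error (false)
  //   sp[2 * 8] : holder
  //   sp[3 * 8] : isolate
  //   sp[4 * 8] : default return value (undefined)
  //   sp[5 * 8] : return value (undefined)
  //   sp[6 * 8] : AccessorInfo data
  //   sp[7 * 8] : receiver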

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Load address of v8::PropertyCallbackInfo::args_ array and name handle.
  __ mov(a0, sp);                               // a0 = Handle<Name>
  __ Daddu(a1, a0, Operand(1 * kPointerSize));  // a1 = v8::PCI::args_

  const int kApiStackSpace = 1;
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // Create the v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  __ sd(a1, MemOperand(sp, 1 * kPointerSize));
  __ Daddu(a1, sp, Operand(1 * kPointerSize));
  // a1 = v8::PropertyCallbackInfo&

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

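  // The getter's C entry point is stored as a Foreign object in the
  // AccessorInfo's js_getter slot; unwrap it to obtain the raw address.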
  __ ld(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset));
  __ ld(api_function_address,
        FieldMemOperand(scratch, Foreign::kForeignAddressOffset));

  // +3 is to skip prolog, return address and name handle.
  MemOperand return_value_operand(
      fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           kStackUnwindSpace, kInvalidStackOffset,
                           return_value_operand, NULL);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64