// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS64

#include "src/code-stubs.h"
#include "src/api-arguments.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/mips64/code-stubs-mips64.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

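// Constructs an array from a variable number of arguments by falling
// through to the runtime. a0 holds the argument count; a1 and a2 appear to
// carry the constructor function and the allocation site (as defined by
// this stub's call interface descriptor). The code stores a1 into the
// stack slot just past the last argument, pushes a1 and a2, and accounts
// for the three extra stack values when tail-calling Runtime::kNewArray.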
void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
  __ dsll(t9, a0, kPointerSizeLog2);
  __ Daddu(t9, sp, t9);
  __ sd(a1, MemOperand(t9, 0));
  __ Push(a1);
  __ Push(a2);
  __ Daddu(a0, a0, 3);
  __ TailCallRuntime(Runtime::kNewArray);
}

void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
  descriptor->Initialize(a0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}

void FastFunctionBindStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
  descriptor->Initialize(a0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}

static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
                                          Condition cc);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* rhs_not_nan,
                                    Label* slow,
                                    bool strict);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs);


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK((param_count == 0) ||
           a0.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments, adjust sp.
    __ Dsubu(sp, sp, Operand(param_count * kPointerSize));
    for (int i = 0; i < param_count; ++i) {
      // Store argument to stack.
      __ sd(descriptor.GetRegisterParameter(i),
            MemOperand(sp, (param_count - 1 - i) * kPointerSize));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ Ret();
}


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Label out_of_range, only_low, negate, done;
  Register input_reg = source();
  Register result_reg = destination();

  int double_offset = offset();
  // Account for saved regs if input is sp.
  if (input_reg.is(sp)) double_offset += 3 * kPointerSize;

  Register scratch =
      GetRegisterThatIsNotOneOf(input_reg, result_reg);
  Register scratch2 =
      GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch);
  Register scratch3 =
      GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch, scratch2);
  DoubleRegister double_scratch = kLithiumScratchDouble;

  __ Push(scratch, scratch2, scratch3);
  if (!skip_fastpath()) {
    // Load double input.
    __ ldc1(double_scratch, MemOperand(input_reg, double_offset));

    // Clear cumulative exception flags and save the FCSR.
    __ cfc1(scratch2, FCSR);
    __ ctc1(zero_reg, FCSR);

    // Try a conversion to a signed integer.
    __ Trunc_w_d(double_scratch, double_scratch);
    // Move the converted value into the result register.
    __ mfc1(scratch3, double_scratch);

    // Retrieve and restore the FCSR.
    __ cfc1(scratch, FCSR);
    __ ctc1(scratch2, FCSR);

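    // On MIPS, trunc.w.d raises the invalid-operation flag for NaN inputs
    // and for values outside the signed 32-bit range, so testing the
    // accumulated FCSR flags below detects a failed conversion without any
    // extra comparisons.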
    // Check for overflow and NaNs.
    __ And(
        scratch, scratch,
        kFCSROverflowFlagMask | kFCSRUnderflowFlagMask
           | kFCSRInvalidOpFlagMask);
    // If we had no exceptions then set result_reg and we are done.
    Label error;
    __ Branch(&error, ne, scratch, Operand(zero_reg));
    __ Move(result_reg, scratch3);
    __ Branch(&done);
    __ bind(&error);
  }

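  // The manual truncation below works directly on the IEEE-754 encoding of
  // the double:
  //
  //   bit  63    : sign
  //   bits 62..52: biased exponent (bias 1023)
  //   bits 51..0 : mantissa, with an implicit leading 1 for normal numbers
  //
  // The low 32 bits of the truncated integer are reconstructed by shifting
  // the two 32-bit halves of the mantissa into place according to the
  // unbiased exponent, then applying the sign.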
  // Load the double value and perform a manual truncation.
  Register input_high = scratch2;
  Register input_low = scratch3;

  __ lw(input_low,
        MemOperand(input_reg, double_offset + Register::kMantissaOffset));
  __ lw(input_high,
        MemOperand(input_reg, double_offset + Register::kExponentOffset));

  Label normal_exponent, restore_sign;
  // Extract the biased exponent in result.
  __ Ext(result_reg,
         input_high,
         HeapNumber::kExponentShift,
         HeapNumber::kExponentBits);

  // Check for Infinity and NaNs, which should return 0.
  __ Subu(scratch, result_reg, HeapNumber::kExponentMask);
  __ Movz(result_reg, zero_reg, scratch);
  __ Branch(&done, eq, scratch, Operand(zero_reg));

  // Express exponent as delta to (number of mantissa bits + 31).
  __ Subu(result_reg,
          result_reg,
          Operand(HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 31));

  // If the delta is strictly positive, all bits would be shifted away,
  // which means that we can return 0.
  __ Branch(&normal_exponent, le, result_reg, Operand(zero_reg));
  __ mov(result_reg, zero_reg);
  __ Branch(&done);

  __ bind(&normal_exponent);
  const int kShiftBase = HeapNumber::kNonMantissaBitsInTopWord - 1;
  // Calculate shift.
  __ Addu(scratch, result_reg, Operand(kShiftBase + HeapNumber::kMantissaBits));

  // Save the sign.
  Register sign = result_reg;
  result_reg = no_reg;
  __ And(sign, input_high, Operand(HeapNumber::kSignMask));

  // On ARM shifts > 31 bits are valid and will result in zero. On MIPS we need
  // to check for this specific case.
  Label high_shift_needed, high_shift_done;
  __ Branch(&high_shift_needed, lt, scratch, Operand(32));
  __ mov(input_high, zero_reg);
  __ Branch(&high_shift_done);
  __ bind(&high_shift_needed);

  // Set the implicit 1 before the mantissa part in input_high.
  __ Or(input_high,
        input_high,
        Operand(1 << HeapNumber::kMantissaBitsInTopWord));
  // Shift the mantissa bits to the correct position.
  // We don't need to clear non-mantissa bits as they will be shifted away.
  // If they weren't, it would mean that the answer is in the 32-bit range.
  __ sllv(input_high, input_high, scratch);

  __ bind(&high_shift_done);

  // Replace the shifted bits with bits from the lower mantissa word.
  Label pos_shift, shift_done;
  __ li(at, 32);
  __ subu(scratch, at, scratch);
  __ Branch(&pos_shift, ge, scratch, Operand(zero_reg));

  // Negate scratch.
  __ Subu(scratch, zero_reg, scratch);
  __ sllv(input_low, input_low, scratch);
  __ Branch(&shift_done);

  __ bind(&pos_shift);
  __ srlv(input_low, input_low, scratch);

  __ bind(&shift_done);
  __ Or(input_high, input_high, Operand(input_low));
  // Restore sign if necessary.
  __ mov(scratch, sign);
  result_reg = sign;
  sign = no_reg;
  __ Subu(result_reg, zero_reg, input_high);
  __ Movz(result_reg, input_high, scratch);

  __ bind(&done);

  __ Pop(scratch, scratch2, scratch3);
  __ Ret();
}


// Handle the case where the lhs and rhs are the same object.
// Equality is almost reflexive (everything but NaN), so this is a test
// for "identity and not NaN".
static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
                                          Condition cc) {
  Label not_identical;
  Label heap_number, return_equal;
  Register exp_mask_reg = t1;

  __ Branch(&not_identical, ne, a0, Operand(a1));

  __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask));

  // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
  // so we do the second best thing - test it ourselves.
  // They are both equal and they are not both Smis, so neither of them is a
  // Smi. If it's not a heap number, then return equal.
  __ GetObjectType(a0, t0, t0);
  if (cc == less || cc == greater) {
    // Call runtime on identical JSObjects.
    __ Branch(slow, greater, t0, Operand(FIRST_JS_RECEIVER_TYPE));
    // Call runtime on identical symbols since we need to throw a TypeError.
    __ Branch(slow, eq, t0, Operand(SYMBOL_TYPE));
    // Call runtime on identical SIMD values since we must throw a TypeError.
    __ Branch(slow, eq, t0, Operand(SIMD128_VALUE_TYPE));
  } else {
    __ Branch(&heap_number, eq, t0, Operand(HEAP_NUMBER_TYPE));
    // Comparing JS objects with <=, >= is complicated.
    if (cc != eq) {
      __ Branch(slow, greater, t0, Operand(FIRST_JS_RECEIVER_TYPE));
      // Call runtime on identical symbols since we need to throw a TypeError.
      __ Branch(slow, eq, t0, Operand(SYMBOL_TYPE));
      // Call runtime on identical SIMD values since we must throw a TypeError.
      __ Branch(slow, eq, t0, Operand(SIMD128_VALUE_TYPE));
      // Normally here we fall through to return_equal, but undefined is
      // special: (undefined == undefined) == true, but
      // (undefined <= undefined) == false! See ECMAScript 11.8.5.
      if (cc == less_equal || cc == greater_equal) {
        __ Branch(&return_equal, ne, t0, Operand(ODDBALL_TYPE));
        __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
        __ Branch(&return_equal, ne, a0, Operand(a6));
        DCHECK(is_int16(GREATER) && is_int16(LESS));
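        // MIPS note: jump instructions execute one extra instruction from
        // the branch delay slot before control transfers. Ret(USE_DELAY_SLOT)
        // places the next emitted instruction (the li into v0) in that slot,
        // so the return value is set on the way out.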
        __ Ret(USE_DELAY_SLOT);
        if (cc == le) {
          // undefined <= undefined should fail.
          __ li(v0, Operand(GREATER));
        } else {
          // undefined >= undefined should fail.
          __ li(v0, Operand(LESS));
        }
      }
    }
  }

  __ bind(&return_equal);
  DCHECK(is_int16(GREATER) && is_int16(LESS));
  __ Ret(USE_DELAY_SLOT);
  if (cc == less) {
    __ li(v0, Operand(GREATER));  // Things aren't less than themselves.
  } else if (cc == greater) {
    __ li(v0, Operand(LESS));  // Things aren't greater than themselves.
  } else {
    __ mov(v0, zero_reg);  // Things are <=, >=, ==, === themselves.
  }
  // For less and greater we don't have to check for NaN since the result of
  // x < x is false regardless. For the others here is some code to check
  // for NaN.
  if (cc != lt && cc != gt) {
    __ bind(&heap_number);
    // It is a heap number, so return non-equal if it's NaN and equal if it's
    // not NaN.

    // The representation of NaN values has all exponent bits (52..62) set,
    // and not all mantissa bits (0..51) clear.
    // Read top bits of double representation (second word of value).
    __ lwu(a6, FieldMemOperand(a0, HeapNumber::kExponentOffset));
    // Test that exponent bits are all set.
    __ And(a7, a6, Operand(exp_mask_reg));
    // If not all exponent bits are set (ne condition), this is not a NaN,
    // so the objects are equal.
    __ Branch(&return_equal, ne, a7, Operand(exp_mask_reg));

    // Shift out the sign bit and all exponent bits, retaining only the top
    // mantissa bits.
    __ sll(a6, a6, HeapNumber::kNonMantissaBitsInTopWord);
    // Or with all low-bits of mantissa.
    __ lwu(a7, FieldMemOperand(a0, HeapNumber::kMantissaOffset));
    __ Or(v0, a7, Operand(a6));
    // For equal we already have the right value in v0: Return zero (equal)
    // if all bits in mantissa are zero (it's an Infinity) and non-zero if
    // not (it's a NaN). For <= and >= we need to load v0 with the failing
    // value if it's a NaN.
    if (cc != eq) {
      // All-zero means Infinity means equal.
      __ Ret(eq, v0, Operand(zero_reg));
      DCHECK(is_int16(GREATER) && is_int16(LESS));
      __ Ret(USE_DELAY_SLOT);
      if (cc == le) {
        __ li(v0, Operand(GREATER));  // NaN <= NaN should fail.
      } else {
        __ li(v0, Operand(LESS));  // NaN >= NaN should fail.
      }
    }
  }
  // No fall through here.

  __ bind(&not_identical);
}


static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* both_loaded_as_doubles,
                                    Label* slow,
                                    bool strict) {
  DCHECK((lhs.is(a0) && rhs.is(a1)) ||
         (lhs.is(a1) && rhs.is(a0)));

  Label lhs_is_smi;
  __ JumpIfSmi(lhs, &lhs_is_smi);
  // Rhs is a Smi.
  // Check whether the non-smi is a heap number.
  __ GetObjectType(lhs, t0, t0);
  if (strict) {
    // If lhs was not a number and rhs was a Smi then strict equality cannot
    // succeed. Return non-equal (lhs is already not zero).
    __ Ret(USE_DELAY_SLOT, ne, t0, Operand(HEAP_NUMBER_TYPE));
    __ mov(v0, lhs);
  } else {
    // Smi compared non-strictly with a non-Smi non-heap-number. Call
    // the runtime.
    __ Branch(slow, ne, t0, Operand(HEAP_NUMBER_TYPE));
  }
  // Rhs is a smi, lhs is a number.
  // Convert smi rhs to double.
  __ SmiUntag(at, rhs);
  __ mtc1(at, f14);
  __ cvt_d_w(f14, f14);
  __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset));

  // We now have both loaded as doubles.
  __ jmp(both_loaded_as_doubles);

  __ bind(&lhs_is_smi);
  // Lhs is a Smi. Check whether the non-smi is a heap number.
  __ GetObjectType(rhs, t0, t0);
  if (strict) {
    // If rhs was not a number and lhs was a Smi then strict equality cannot
    // succeed. Return non-equal.
    __ Ret(USE_DELAY_SLOT, ne, t0, Operand(HEAP_NUMBER_TYPE));
    __ li(v0, Operand(1));
  } else {
    // Smi compared non-strictly with a non-Smi non-heap-number. Call
    // the runtime.
    __ Branch(slow, ne, t0, Operand(HEAP_NUMBER_TYPE));
  }

  // Lhs is a smi, rhs is a number.
  // Convert smi lhs to double.
  __ SmiUntag(at, lhs);
  __ mtc1(at, f12);
  __ cvt_d_w(f12, f12);
  __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset));
  // Fall through to both_loaded_as_doubles.
}


static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs) {
  // If either operand is a JS object or an oddball value, then they are
  // not equal since their pointers are different.
  // There is no test for undetectability in strict equality.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  Label first_non_object;
  // Get the type of the first operand into a2 and compare it with
  // FIRST_JS_RECEIVER_TYPE.
  __ GetObjectType(lhs, a2, a2);
  __ Branch(&first_non_object, less, a2, Operand(FIRST_JS_RECEIVER_TYPE));

  // Return non-zero.
  Label return_not_equal;
  __ bind(&return_not_equal);
  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(1));

  __ bind(&first_non_object);
  // Check for oddballs: true, false, null, undefined.
  __ Branch(&return_not_equal, eq, a2, Operand(ODDBALL_TYPE));

  __ GetObjectType(rhs, a3, a3);
  __ Branch(&return_not_equal, greater, a3, Operand(FIRST_JS_RECEIVER_TYPE));

  // Check for oddballs: true, false, null, undefined.
  __ Branch(&return_not_equal, eq, a3, Operand(ODDBALL_TYPE));

  // Now that we have the types we might as well check for
  // internalized-internalized.
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
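  // Since both tags are zero, OR-ing the two instance types and masking
  // with (kIsNotStringMask | kIsNotInternalizedMask) yields zero only when
  // both operands are internalized strings. Internalized strings are
  // unique, so two distinct pointers can never be equal strings, and
  // returning not-equal is safe.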
  __ Or(a2, a2, Operand(a3));
  __ And(at, a2, Operand(kIsNotStringMask | kIsNotInternalizedMask));
  __ Branch(&return_not_equal, eq, at, Operand(zero_reg));
}


static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
                                       Register lhs,
                                       Register rhs,
                                       Label* both_loaded_as_doubles,
                                       Label* not_heap_numbers,
                                       Label* slow) {
  __ GetObjectType(lhs, a3, a2);
  __ Branch(not_heap_numbers, ne, a2, Operand(HEAP_NUMBER_TYPE));
  __ ld(a2, FieldMemOperand(rhs, HeapObject::kMapOffset));
  // If first was a heap number & second wasn't, go to slow case.
  __ Branch(slow, ne, a3, Operand(a2));

  // Both are heap numbers. Load them up then jump to the code we have
  // for that.
  __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset));
  __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset));

  __ jmp(both_loaded_as_doubles);
}


// Fast negative check for internalized-to-internalized equality.
static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
                                                     Register lhs, Register rhs,
                                                     Label* possible_strings,
                                                     Label* runtime_call) {
  DCHECK((lhs.is(a0) && rhs.is(a1)) ||
         (lhs.is(a1) && rhs.is(a0)));

  // a2 is object type of rhs.
  Label object_test, return_equal, return_unequal, undetectable;
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ And(at, a2, Operand(kIsNotStringMask));
  __ Branch(&object_test, ne, at, Operand(zero_reg));
  __ And(at, a2, Operand(kIsNotInternalizedMask));
  __ Branch(possible_strings, ne, at, Operand(zero_reg));
  __ GetObjectType(rhs, a3, a3);
  __ Branch(runtime_call, ge, a3, Operand(FIRST_NONSTRING_TYPE));
  __ And(at, a3, Operand(kIsNotInternalizedMask));
  __ Branch(possible_strings, ne, at, Operand(zero_reg));

  // Both are internalized. We already checked they weren't the same pointer so
  // they are not equal. Return non-equal by returning the non-zero object
  // pointer in v0.
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);  // In delay slot.

  __ bind(&object_test);
  __ ld(a2, FieldMemOperand(lhs, HeapObject::kMapOffset));
  __ ld(a3, FieldMemOperand(rhs, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(a2, Map::kBitFieldOffset));
  __ lbu(t1, FieldMemOperand(a3, Map::kBitFieldOffset));
  __ And(at, t0, Operand(1 << Map::kIsUndetectable));
  __ Branch(&undetectable, ne, at, Operand(zero_reg));
  __ And(at, t1, Operand(1 << Map::kIsUndetectable));
  __ Branch(&return_unequal, ne, at, Operand(zero_reg));

  __ GetInstanceType(a2, a2);
  __ Branch(runtime_call, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE));
  __ GetInstanceType(a3, a3);
  __ Branch(runtime_call, lt, a3, Operand(FIRST_JS_RECEIVER_TYPE));

  __ bind(&return_unequal);
  // Return non-equal by returning the non-zero object pointer in v0.
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);  // In delay slot.

  __ bind(&undetectable);
  __ And(at, t1, Operand(1 << Map::kIsUndetectable));
  __ Branch(&return_unequal, eq, at, Operand(zero_reg));

  // If both sides are JSReceivers, then the result is false according to
  // the HTML specification, which says that only comparisons with null or
  // undefined are affected by special casing for document.all.
  __ GetInstanceType(a2, a2);
  __ Branch(&return_equal, eq, a2, Operand(ODDBALL_TYPE));
  __ GetInstanceType(a3, a3);
  __ Branch(&return_unequal, ne, a3, Operand(ODDBALL_TYPE));

  __ bind(&return_equal);
  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(EQUAL));  // In delay slot.
}


static void CompareICStub_CheckInputType(MacroAssembler* masm, Register input,
                                         Register scratch,
                                         CompareICState::State expected,
                                         Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail,
                DONT_DO_SMI_CHECK);
  }
  // We could be strict about internalized/string here, but as long as
  // hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


// On entry a0 and a1 are the values to be compared.
// On exit v0 is 0, positive or negative to indicate the result of
// the comparison.
void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Register lhs = a1;
  Register rhs = a0;
  Condition cc = GetCondition();

  Label miss;
  CompareICStub_CheckInputType(masm, lhs, a2, left(), &miss);
  CompareICStub_CheckInputType(masm, rhs, a3, right(), &miss);

  Label slow;  // Call builtin.
  Label not_smis, both_loaded_as_doubles;

  Label not_two_smis, smi_done;
  __ Or(a2, a1, a0);
  __ JumpIfNotSmi(a2, &not_two_smis);
  __ SmiUntag(a1);
  __ SmiUntag(a0);
551
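  // After untagging, both values are sign-extended 32-bit quantities in
  // 64-bit registers, so the 64-bit subtraction below cannot overflow and
  // its sign is a valid three-way comparison result.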
  __ Ret(USE_DELAY_SLOT);
  __ dsubu(v0, a1, a0);
  __ bind(&not_two_smis);

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Handle the case where the objects are identical. Either returns the answer
  // or goes to slow. Only falls through if the objects were not identical.
  EmitIdenticalObjectComparison(masm, &slow, cc);

  // If either is a Smi (we know that not both are), then they can only
  // be strictly equal if the other is a HeapNumber.
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
  __ And(a6, lhs, Operand(rhs));
  __ JumpIfNotSmi(a6, &not_smis, a4);
  // One operand is a smi. EmitSmiNonsmiComparison generates code that can:
  // 1) Return the answer.
  // 2) Go to slow.
  // 3) Fall through to both_loaded_as_doubles.
  // 4) Jump to rhs_not_nan.
  // In cases 3 and 4 we have found out we were dealing with a number-number
  // comparison and the numbers have been loaded into f12 and f14 as doubles,
  // or in GP registers (a0, a1, a2, a3) depending on the presence of the FPU.
  EmitSmiNonsmiComparison(masm, lhs, rhs,
                          &both_loaded_as_doubles, &slow, strict());

  __ bind(&both_loaded_as_doubles);
  // f12, f14 are the double representations of the left hand side
  // and the right hand side if we have FPU. Otherwise a2, a3 represent
  // left hand side and a0, a1 represent right hand side.

  Label nan;
  __ li(a4, Operand(LESS));
  __ li(a5, Operand(GREATER));
  __ li(a6, Operand(EQUAL));

  // Check if either rhs or lhs is NaN.
  __ BranchF(NULL, &nan, eq, f12, f14);

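  // Pre-r6 MIPS has no fused FPU compare-and-move: the c.cond.fmt
  // instruction sets the FPU condition flag, and Movt/Movf then
  // conditionally move a GPR depending on whether that flag is true or
  // false.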
  // Check if the LESS condition is satisfied. If true, conditionally move
  // the result to v0.
  if (kArchVariant != kMips64r6) {
    __ c(OLT, D, f12, f14);
    __ Movt(v0, a4);
    // Use the previous check to conditionally store the opposite result
    // (GREATER) to v0. If rhs is equal to lhs, this will be corrected by
    // the next check.
    __ Movf(v0, a5);
    // Check if the EQUAL condition is satisfied. If true, conditionally
    // move the result to v0.
    __ c(EQ, D, f12, f14);
    __ Movt(v0, a6);
  } else {
    Label skip;
    __ BranchF(USE_DELAY_SLOT, &skip, NULL, lt, f12, f14);
    __ mov(v0, a4);  // Return LESS as result.

    __ BranchF(USE_DELAY_SLOT, &skip, NULL, eq, f12, f14);
    __ mov(v0, a6);  // Return EQUAL as result.

    __ mov(v0, a5);  // Return GREATER as result.
    __ bind(&skip);
  }
  __ Ret();

  __ bind(&nan);
  // NaN comparisons always fail.
  // Load whatever we need in v0 to make the comparison fail.
  DCHECK(is_int16(GREATER) && is_int16(LESS));
  __ Ret(USE_DELAY_SLOT);
  if (cc == lt || cc == le) {
    __ li(v0, Operand(GREATER));
  } else {
    __ li(v0, Operand(LESS));
  }


  __ bind(&not_smis);
  // At this point we know we are dealing with two different objects,
  // and neither of them is a Smi. The objects are in lhs_ and rhs_.
  if (strict()) {
    // This returns non-equal for some object types, or falls through if it
    // was not lucky.
    EmitStrictTwoHeapObjectCompare(masm, lhs, rhs);
  }

  Label check_for_internalized_strings;
  Label flat_string_check;
  // Check for heap-number-heap-number comparison. Can jump to slow case,
  // or load both doubles and jump to the code that handles
  // that case. If the inputs are not doubles then jumps to
  // check_for_internalized_strings.
  // In this case a2 will contain the type of lhs_.
  EmitCheckForTwoHeapNumbers(masm,
                             lhs,
                             rhs,
                             &both_loaded_as_doubles,
                             &check_for_internalized_strings,
                             &flat_string_check);

  __ bind(&check_for_internalized_strings);
  if (cc == eq && !strict()) {
    // Returns an answer for two internalized strings or two
    // detectable objects.
    // Otherwise jumps to string case or not both strings case.
    // Assumes that a2 is the type of lhs_ on entry.
    EmitCheckForInternalizedStringsOrObjects(
        masm, lhs, rhs, &flat_string_check, &slow);
  }

  // Check for both being sequential one-byte strings,
  // and inline if that is the case.
  __ bind(&flat_string_check);

  __ JumpIfNonSmisNotBothSequentialOneByteStrings(lhs, rhs, a2, a3, &slow);

  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2,
                      a3);
  if (cc == eq) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, lhs, rhs, a2, a3, a4);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, a2, a3, a4,
                                                    a5);
  }
  // Never falls through to here.

  __ bind(&slow);
  if (cc == eq) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(lhs, rhs);
      __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
    }
    // Turn true into 0 and false into some non-zero value.
    STATIC_ASSERT(EQUAL == 0);
    __ LoadRoot(a0, Heap::kTrueValueRootIndex);
    __ Ret(USE_DELAY_SLOT);
    __ subu(v0, v0, a0);  // In delay slot.
  } else {
    // Prepare for call to builtin. Push object pointers, a1 (lhs) first,
    // a0 (rhs) second.
    __ Push(lhs, rhs);
    int ncr;  // NaN compare result.
    if (cc == lt || cc == le) {
      ncr = GREATER;
    } else {
      DCHECK(cc == gt || cc == ge);  // Remaining cases.
      ncr = LESS;
    }
    __ li(a0, Operand(Smi::FromInt(ncr)));
    __ push(a0);

    // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
    // tagged as a small integer.
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


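// The following two stubs are entered via a call with the caller's ra
// already pushed on the stack: each stashes its own return address in t9,
// pops the original ra back, saves or restores the safepoint registers,
// and jumps back through t9.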
void StoreRegistersStateStub::Generate(MacroAssembler* masm) {
  __ mov(t9, ra);
  __ pop(ra);
  __ PushSafepointRegisters();
  __ Jump(t9);
}


void RestoreRegistersStateStub::Generate(MacroAssembler* masm) {
  __ mov(t9, ra);
  __ pop(ra);
  __ PopSafepointRegisters();
  __ Jump(t9);
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ MultiPush(kJSCallerSaved | ra.bit());
  if (save_doubles()) {
    __ MultiPushFPU(kCallerSavedFPU);
  }
  const int argument_count = 1;
  const int fp_argument_count = 0;
  const Register scratch = a1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
  __ li(a0, Operand(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    __ MultiPopFPU(kCallerSavedFPU);
  }

  __ MultiPop(kJSCallerSaved | ra.bit());
  __ Ret();
}


void MathPowStub::Generate(MacroAssembler* masm) {
  const Register base = a1;
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(a2));
  const Register heapnumbermap = a5;
  const Register heapnumber = v0;
  const DoubleRegister double_base = f2;
  const DoubleRegister double_exponent = f4;
  const DoubleRegister double_result = f0;
  const DoubleRegister double_scratch = f6;
  const FPURegister single_scratch = f8;
  const Register scratch = t1;
  const Register scratch2 = a7;

  Label call_runtime, done, int_exponent;
  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack to double registers.
    __ ld(base, MemOperand(sp, 1 * kPointerSize));
    __ ld(exponent, MemOperand(sp, 0 * kPointerSize));

    __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);

    __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
    __ ld(scratch, FieldMemOperand(base, JSObject::kMapOffset));
    __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));

    __ ldc1(double_base, FieldMemOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent);

    __ bind(&base_is_smi);
    __ mtc1(scratch, single_scratch);
    __ cvt_d_w(double_base, single_scratch);
    __ bind(&unpack_exponent);

    __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);

    __ ld(scratch, FieldMemOperand(exponent, JSObject::kMapOffset));
    __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));
    __ ldc1(double_exponent,
            FieldMemOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    // Base is already in double_base.
    __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);

    __ ldc1(double_exponent,
            FieldMemOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label int_exponent_convert;
    // Detect integer exponents stored as double.
    __ EmitFPUTruncate(kRoundToMinusInf,
                       scratch,
                       double_exponent,
                       at,
                       double_scratch,
                       scratch2,
                       kCheckForInexactConversion);
    // scratch2 == 0 means there was no conversion error.
    __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg));

    if (exponent_type() == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label not_plus_half;

      // Test for 0.5.
      __ Move(double_scratch, 0.5);
      __ BranchF(USE_DELAY_SLOT,
                 &not_plus_half,
                 NULL,
                 ne,
                 double_exponent,
                 double_scratch);
      // double_scratch can be overwritten in the delay slot.
      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      __ Move(double_scratch, static_cast<double>(-V8_INFINITY));
      __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, double_base, double_scratch);
      __ neg_d(double_result, double_scratch);

      // Add +0 to convert -0 to +0.
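      // (In IEEE-754 arithmetic -0 + +0 is +0, while sqrt(-0) would yield
      // -0; the spec requires Math.pow(-0, 0.5) to be +0, so the addition
      // normalizes the sign before taking the square root.)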
      __ add_d(double_scratch, double_base, kDoubleRegZero);
      __ sqrt_d(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&not_plus_half);
      __ Move(double_scratch, -0.5);
      __ BranchF(USE_DELAY_SLOT,
                 &call_runtime,
                 NULL,
                 ne,
                 double_exponent,
                 double_scratch);
      // double_scratch can be overwritten in the delay slot.
      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      __ Move(double_scratch, static_cast<double>(-V8_INFINITY));
      __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, double_base, double_scratch);
      __ Move(double_result, kDoubleRegZero);

      // Add +0 to convert -0 to +0.
      __ add_d(double_scratch, double_base, kDoubleRegZero);
      __ Move(double_result, 1.);
      __ sqrt_d(double_scratch, double_scratch);
      __ div_d(double_result, double_result, double_scratch);
      __ jmp(&done);
    }

    __ push(ra);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(0, 2, scratch2);
      __ MovToFloatParameters(double_base, double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()),
          0, 2);
    }
    __ pop(ra);
    __ MovFromFloatResult(double_result);
    __ jmp(&done);

    __ bind(&int_exponent_convert);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);

  // Get two copies of exponent in the registers scratch and exponent.
  if (exponent_type() == INTEGER) {
    __ mov(scratch, exponent);
  } else {
    // Exponent has previously been stored into scratch as untagged integer.
    __ mov(exponent, scratch);
  }

  __ mov_d(double_scratch, double_base);  // Back up base.
  __ Move(double_result, 1.0);

  // Get absolute value of exponent.
  Label positive_exponent;
  __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg));
  __ Dsubu(scratch, zero_reg, scratch);
  __ bind(&positive_exponent);

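  // The loop below is binary exponentiation (square-and-multiply): the base
  // is squared on every iteration, and multiplied into the result whenever
  // the current low bit of the exponent is set. For example, exponent 13
  // (0b1101) accumulates x * x^4 * x^8 = x^13 in four iterations rather
  // than thirteen multiplications.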
  Label while_true, no_carry, loop_end;
  __ bind(&while_true);

  __ And(scratch2, scratch, 1);

  __ Branch(&no_carry, eq, scratch2, Operand(zero_reg));
  __ mul_d(double_result, double_result, double_scratch);
  __ bind(&no_carry);

  __ dsra(scratch, scratch, 1);

  __ Branch(&loop_end, eq, scratch, Operand(zero_reg));
  __ mul_d(double_scratch, double_scratch, double_scratch);

  __ Branch(&while_true);

  __ bind(&loop_end);

  __ Branch(&done, ge, exponent, Operand(zero_reg));
  __ Move(double_scratch, 1.0);
  __ div_d(double_result, double_scratch, double_result);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ BranchF(&done, NULL, ne, double_result, kDoubleRegZero);

  // double_exponent may not contain the exponent value if the input was a
  // smi. We set it with exponent value before bailing out.
  __ mtc1(exponent, single_scratch);
  __ cvt_d_w(double_exponent, single_scratch);

  // Returning or bailing out.
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in exponent.
    __ bind(&done);
    __ AllocateHeapNumber(
        heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
    __ sdc1(double_result,
            FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
    DCHECK(heapnumber.is(v0));
    __ DropAndRet(2);
  } else {
    __ push(ra);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(0, 2, scratch);
      __ MovToFloatParameters(double_base, double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()),
          0, 2);
    }
    __ pop(ra);
    __ MovFromFloatResult(double_result);

    __ bind(&done);
    __ Ret();
  }
}


bool CEntryStub::NeedsImmovableCode() {
  return true;
}


void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  StoreRegistersStateStub::GenerateAheadOfTime(isolate);
  RestoreRegistersStateStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
  StoreFastElementStub::GenerateAheadOfTime(isolate);
  TypeofStub::GenerateAheadOfTime(isolate);
}


void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) {
  StoreRegistersStateStub stub(isolate);
  stub.GetCode();
}


void RestoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) {
  RestoreRegistersStateStub stub(isolate);
  stub.GetCode();
}


void CodeStub::GenerateFPStubs(Isolate* isolate) {
  // Generate if not already in cache.
  SaveFPRegsMode mode = kSaveFPRegs;
  CEntryStub(isolate, 1, mode).GetCode();
  StoreBufferOverflowStub(isolate, mode).GetCode();
  isolate->set_fp_stubs_generated(true);
}


void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
  stub.GetCode();
}


void CEntryStub::Generate(MacroAssembler* masm) {
  // Called from JavaScript; parameters are on stack as if calling JS function
  // a0: number of arguments including receiver
  // a1: pointer to builtin function
  // fp: frame pointer (restored after C call)
  // sp: stack pointer (restored as callee's sp after C call)
  // cp: current context (C callee-saved)
  //
  // If argv_in_register():
  // a2: pointer to the first argument

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  if (argv_in_register()) {
    // Move argv into the correct register.
    __ mov(s1, a2);
  } else {
    // Compute the argv pointer in a callee-saved register.
    __ Dlsa(s1, sp, a0, kPointerSizeLog2);
    __ Dsubu(s1, s1, kPointerSize);
  }

  // Enter the exit frame that transitions from JavaScript to C++.
  FrameScope scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(save_doubles());

  // s0: number of arguments including receiver (C callee-saved)
  // s1: pointer to first argument (C callee-saved)
  // s2: pointer to builtin function (C callee-saved)

  // Prepare arguments for C routine.
  // a0 = argc
  __ mov(s0, a0);
  __ mov(s2, a1);

  // We are calling compiled C/C++ code. a0 and a1 hold our two arguments. We
  // also need to reserve the 4 argument slots on the stack.

  __ AssertStackIsAligned();

  int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  int result_stack_size;
  if (result_size() <= 2) {
    // a0 = argc, a1 = argv, a2 = isolate
    __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
    __ mov(a1, s1);
    result_stack_size = 0;
  } else {
    DCHECK_EQ(3, result_size());
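    // Under the MIPS n64 calling convention, aggregate results wider than
    // two registers are returned through a hidden pointer passed as the
    // first argument, so space for the result is reserved on the stack and
    // the real arguments shift down by one register.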
    // Allocate additional space for the result.
    result_stack_size =
        ((result_size() * kPointerSize) + frame_alignment_mask) &
        ~frame_alignment_mask;
    __ Dsubu(sp, sp, Operand(result_stack_size));

    // a0 = hidden result argument, a1 = argc, a2 = argv, a3 = isolate.
    __ li(a3, Operand(ExternalReference::isolate_address(isolate())));
    __ mov(a2, s1);
    __ mov(a1, a0);
    __ mov(a0, sp);
  }

  // To let the GC traverse the return address of the exit frames, we need to
  // know where the return address is. The CEntryStub is unmovable, so
  // we can store the address on the stack to be able to find it again and
  // we never have to restore it, because it will not change.
  { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm);
    int kNumInstructionsToJump = 4;
    Label find_ra;
    // Adjust the value in ra to point to the correct return location, 2nd
    // instruction past the real call into C code (the jalr(t9)), and push it.
    // This is the return address of the exit frame.
    if (kArchVariant >= kMips64r6) {
      __ addiupc(ra, kNumInstructionsToJump + 1);
    } else {
      // This branch-and-link sequence is needed to find the current PC on mips
      // before r6, saved to the ra register.
      __ bal(&find_ra);  // bal exposes branch delay slot.
      __ Daddu(ra, ra, kNumInstructionsToJump * Instruction::kInstrSize);
    }
    __ bind(&find_ra);

    // This spot was reserved in EnterExitFrame.
    __ sd(ra, MemOperand(sp, result_stack_size));
    // Stack space reservation moved to the branch delay slot below.
    // Stack is still aligned.

    // Call the C routine.
    __ mov(t9, s2);  // Function pointer to t9 to conform to ABI for PIC.
    __ jalr(t9);
    // Set up sp in the delay slot.
    __ daddiu(sp, sp, -kCArgsSlotsSize);
    // Make sure the stored 'ra' points to this position.
    DCHECK_EQ(kNumInstructionsToJump,
              masm->InstructionsGeneratedSince(&find_ra));
  }
  if (result_size() > 2) {
    DCHECK_EQ(3, result_size());
    // Read result values stored on stack.
    __ ld(a0, MemOperand(v0, 2 * kPointerSize));
    __ ld(v1, MemOperand(v0, 1 * kPointerSize));
    __ ld(v0, MemOperand(v0, 0 * kPointerSize));
  }
  // Result returned in v0, v1:v0 or a0:v1:v0 - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ LoadRoot(a4, Heap::kExceptionRootIndex);
  __ Branch(&exception_returned, eq, a4, Operand(v0));

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    ExternalReference pending_exception_address(
        Isolate::kPendingExceptionAddress, isolate());
    __ li(a2, Operand(pending_exception_address));
    __ ld(a2, MemOperand(a2));
    __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
    // Cannot use check here as it attempts to generate call into runtime.
    __ Branch(&okay, eq, a4, Operand(a2));
    __ stop("Unexpected pending exception");
    __ bind(&okay);
  }

  // Exit C frame and return.
  // v0:v1: result
  // sp: stack pointer
  // fp: frame pointer
  Register argc;
  if (argv_in_register()) {
    // We don't want to pop arguments so set argc to no_reg.
    argc = no_reg;
  } else {
    // s0: still holds argc (callee-saved).
    argc = s0;
  }
  __ LeaveExitFrame(save_doubles(), argc, true, EMIT_RETURN);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address(
      Isolate::kPendingHandlerContextAddress, isolate());
  ExternalReference pending_handler_code_address(
      Isolate::kPendingHandlerCodeAddress, isolate());
  ExternalReference pending_handler_offset_address(
      Isolate::kPendingHandlerOffsetAddress, isolate());
  ExternalReference pending_handler_fp_address(
      Isolate::kPendingHandlerFPAddress, isolate());
  ExternalReference pending_handler_sp_address(
      Isolate::kPendingHandlerSPAddress, isolate());

  // Ask the runtime for help to determine the handler. This will set v0 to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
                                 isolate());
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, 0, a0);
    __ mov(a0, zero_reg);
    __ mov(a1, zero_reg);
    __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ li(cp, Operand(pending_handler_context_address));
  __ ld(cp, MemOperand(cp));
  __ li(sp, Operand(pending_handler_sp_address));
  __ ld(sp, MemOperand(sp));
  __ li(fp, Operand(pending_handler_fp_address));
  __ ld(fp, MemOperand(fp));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (cp == 0) for non-JS frames.
  Label zero;
  __ Branch(&zero, eq, cp, Operand(zero_reg));
  __ sd(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ bind(&zero);

  // Compute the handler entry address and jump to it.
  __ li(a1, Operand(pending_handler_code_address));
  __ ld(a1, MemOperand(a1));
  __ li(a2, Operand(pending_handler_offset_address));
  __ ld(a2, MemOperand(a2));
  __ Daddu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Daddu(t9, a1, a2);
  __ Jump(t9);
}


void JSEntryStub::Generate(MacroAssembler* masm) {
  Label invoke, handler_entry, exit;
  Isolate* isolate = masm->isolate();

  // TODO(plind): unify the ABI description here.
  // Registers:
  // a0: entry address
  // a1: function
  // a2: receiver
  // a3: argc
  // a4: argv (5th argument, passed in a register on mips64)

  // Stack:
  // 0 arg slots on mips64 (4 args slots on mips)
  // args -- passed in a4 on mips64, on the stack on mips
1228
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Save callee saved registers on the stack.
  __ MultiPush(kCalleeSaved | ra.bit());

  // Save callee-saved FPU registers.
  __ MultiPushFPU(kCalleeSavedFPU);
  // Set up the reserved register for 0.0.
  __ Move(kDoubleRegZero, 0.0);

  // Load argv in s0 register.
  __ mov(s0, a4);  // argv arrives in a4, the 5th parameter register on mips64.

  __ InitializeRootRegister();

  // We build an EntryFrame.
  __ li(a7, Operand(-1));  // Push a bad frame pointer to fail if it is used.
  int marker = type();
  __ li(a6, Operand(Smi::FromInt(marker)));
  __ li(a5, Operand(Smi::FromInt(marker)));
  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate);
  __ li(a4, Operand(c_entry_fp));
  __ ld(a4, MemOperand(a4));
  __ Push(a7, a6, a5, a4);
  // Set up frame pointer for the frame to be pushed.
  __ daddiu(fp, sp, -EntryFrameConstants::kCallerFPOffset);

  // Registers:
  // a0: entry_address
  // a1: function
  // a2: receiver_pointer
  // a3: argc
  // s0: argv
  //
  // Stack:
  // caller fp          |
  // function slot      | entry frame
  // context slot       |
  // bad fp (0xff...f)  |
  // callee saved registers + ra
  // [ O32: 4 args slots]
  // args

  // If this is the outermost JS call, set js_entry_sp value.
  Label non_outermost_js;
  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
  __ li(a5, Operand(ExternalReference(js_entry_sp)));
  __ ld(a6, MemOperand(a5));
  __ Branch(&non_outermost_js, ne, a6, Operand(zero_reg));
  __ sd(fp, MemOperand(a5));
  __ li(a4, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  Label cont;
  __ b(&cont);
  __ nop();  // Branch delay slot nop.
  __ bind(&non_outermost_js);
  __ li(a4, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
  __ bind(&cont);
  __ push(a4);

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();
  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel. Coming in here the
  // fp will be invalid because the PushStackHandler below sets it to 0 to
  // signal the existence of the JSEntry frame.
  __ li(a4, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
                                      isolate)));
  __ sd(v0, MemOperand(a4));  // We come back from 'invoke'. result is in v0.
  __ LoadRoot(v0, Heap::kExceptionRootIndex);
  __ b(&exit);  // b exposes branch delay slot.
  __ nop();  // Branch delay slot nop.

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler();
  // If an exception not caught by another handler occurs, this handler
  // returns control to the code after the jump to &invoke above, which
  // restores all kCalleeSaved registers (including cp and fp) to their
  // saved values before returning a failure to C.

  // Clear any pending exceptions.
  __ LoadRoot(a5, Heap::kTheHoleValueRootIndex);
  __ li(a4, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
                                      isolate)));
  __ sd(a5, MemOperand(a4));

  // Invoke the function by calling through JS entry trampoline builtin.
  // Notice that we cannot store a reference to the trampoline code directly in
  // this stub, because runtime stubs are not traversed when doing GC.

  // Registers:
  // a0: entry_address
  // a1: function
  // a2: receiver_pointer
  // a3: argc
  // s0: argv
  //
  // Stack:
  // handler frame
  // entry frame
  // callee saved registers + ra
  // [ O32: 4 args slots]
  // args

  if (type() == StackFrame::ENTRY_CONSTRUCT) {
    ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
                                      isolate);
    __ li(a4, Operand(construct_entry));
  } else {
    ExternalReference entry(Builtins::kJSEntryTrampoline, masm->isolate());
    __ li(a4, Operand(entry));
  }
  __ ld(t9, MemOperand(a4));  // Deref address.
  // Call JSEntryTrampoline.
  __ daddiu(t9, t9, Code::kHeaderSize - kHeapObjectTag);
  __ Call(t9);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);  // v0 holds result
  // Check if the current stack frame is marked as the outermost JS frame.
  Label non_outermost_js_2;
  __ pop(a5);
  __ Branch(&non_outermost_js_2,
            ne,
            a5,
            Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ li(a5, Operand(ExternalReference(js_entry_sp)));
  __ sd(zero_reg, MemOperand(a5));
  __ bind(&non_outermost_js_2);

  // Restore the top frame descriptors from the stack.
  __ pop(a5);
  __ li(a4, Operand(ExternalReference(Isolate::kCEntryFPAddress,
                                      isolate)));
  __ sd(a5, MemOperand(a4));

  // Reset the stack to the callee saved registers.
  __ daddiu(sp, sp, -EntryFrameConstants::kCallerFPOffset);

  // Restore callee-saved fpu registers.
  __ MultiPopFPU(kCalleeSavedFPU);

  // Restore callee saved registers from the stack.
  __ MultiPop(kCalleeSaved | ra.bit());
  // Return.
  __ Jump(ra);
}


void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is in ra.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = a5;
  Register result = v0;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()));

  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


Ben Murdochb8a8cc12014-11-26 15:28:44 +00001411void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
1412 Label miss;
1413 Register receiver = LoadDescriptor::ReceiverRegister();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001414 // Ensure that the vector and slot registers won't be clobbered before
1415 // calling the miss handler.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001416 DCHECK(!AreAliased(a4, a5, LoadWithVectorDescriptor::VectorRegister(),
1417 LoadWithVectorDescriptor::SlotRegister()));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001418
1419 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, a4,
1420 a5, &miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001421 __ bind(&miss);
1422 PropertyAccessCompiler::TailCallBuiltin(
1423 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
1424}
1425
1426
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001427void RegExpExecStub::Generate(MacroAssembler* masm) {
1428 // Just jump directly to the runtime if native RegExp is not selected at
1429 // compile time, or if the regexp entry in generated code is turned off by a
1430 // runtime switch.
1431#ifdef V8_INTERPRETED_REGEXP
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001432 __ TailCallRuntime(Runtime::kRegExpExec);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001433#else // V8_INTERPRETED_REGEXP
1434
1435 // Stack frame on entry.
1436 // sp[0]: last_match_info (expected JSArray)
1437 // sp[8]: previous index
1438 // sp[16]: subject string
1439 // sp[24]: JSRegExp object
1440
1441 const int kLastMatchInfoOffset = 0 * kPointerSize;
1442 const int kPreviousIndexOffset = 1 * kPointerSize;
1443 const int kSubjectOffset = 2 * kPointerSize;
1444 const int kJSRegExpOffset = 3 * kPointerSize;
1445
1446 Label runtime;
1447 // Allocation of registers for this function. These are in callee save
1448 // registers and will be preserved by the call to the native RegExp code, as
1449 // this code is called using the normal C calling convention. When calling
1450 // directly from generated code the native RegExp code will not do a GC and
1451 // therefore the contents of these registers are safe to use after the call.
1452 // MIPS - using s0..s2, since we are not using CEntry Stub.
1453 Register subject = s0;
1454 Register regexp_data = s1;
1455 Register last_match_info_elements = s2;
1456
1457 // Ensure that a RegExp stack is allocated.
1458 ExternalReference address_of_regexp_stack_memory_address =
1459 ExternalReference::address_of_regexp_stack_memory_address(
1460 isolate());
1461 ExternalReference address_of_regexp_stack_memory_size =
1462 ExternalReference::address_of_regexp_stack_memory_size(isolate());
1463 __ li(a0, Operand(address_of_regexp_stack_memory_size));
1464 __ ld(a0, MemOperand(a0, 0));
1465 __ Branch(&runtime, eq, a0, Operand(zero_reg));
1466
1467 // Check that the first argument is a JSRegExp object.
1468 __ ld(a0, MemOperand(sp, kJSRegExpOffset));
1469 STATIC_ASSERT(kSmiTag == 0);
1470 __ JumpIfSmi(a0, &runtime);
1471 __ GetObjectType(a0, a1, a1);
1472 __ Branch(&runtime, ne, a1, Operand(JS_REGEXP_TYPE));
1473
1474 // Check that the RegExp has been compiled (data contains a fixed array).
1475 __ ld(regexp_data, FieldMemOperand(a0, JSRegExp::kDataOffset));
1476 if (FLAG_debug_code) {
1477 __ SmiTst(regexp_data, a4);
1478 __ Check(nz,
1479 kUnexpectedTypeForRegExpDataFixedArrayExpected,
1480 a4,
1481 Operand(zero_reg));
1482 __ GetObjectType(regexp_data, a0, a0);
1483 __ Check(eq,
1484 kUnexpectedTypeForRegExpDataFixedArrayExpected,
1485 a0,
1486 Operand(FIXED_ARRAY_TYPE));
1487 }
1488
1489 // regexp_data: RegExp data (FixedArray)
1490 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
1491 __ ld(a0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
1492 __ Branch(&runtime, ne, a0, Operand(Smi::FromInt(JSRegExp::IRREGEXP)));
1493
1494 // regexp_data: RegExp data (FixedArray)
1495 // Check that the number of captures fit in the static offsets vector buffer.
1496 __ ld(a2,
1497 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
1498 // Check (number_of_captures + 1) * 2 <= offsets vector size
1499 // Or number_of_captures * 2 <= offsets vector size - 2
1500 // Or number_of_captures <= offsets vector size / 2 - 1
1501 // Multiplying by 2 comes for free since a2 is smi-tagged.
1502 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
1503 int temp = Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1;
1504 __ Branch(&runtime, hi, a2, Operand(Smi::FromInt(temp)));
1505
1506 // Reset offset for possibly sliced string.
1507 __ mov(t0, zero_reg);
1508 __ ld(subject, MemOperand(sp, kSubjectOffset));
1509 __ JumpIfSmi(subject, &runtime);
1510 __ mov(a3, subject); // Make a copy of the original subject string.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001511
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001512 // subject: subject string
1513 // a3: subject string
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001514 // regexp_data: RegExp data (FixedArray)
1515 // Handle subject string according to its encoding and representation:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001516 // (1) Sequential string? If yes, go to (4).
1517 // (2) Sequential or cons? If not, go to (5).
1518 // (3) Cons string. If the string is flat, replace subject with first string
1519 // and go to (1). Otherwise bail out to runtime.
1520 // (4) Sequential string. Load regexp code according to encoding.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001521 // (E) Carry on.
1522 /// [...]
1523
1524 // Deferred code at the end of the stub:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001525 // (5) Long external string? If not, go to (7).
1526 // (6) External string. Make it, offset-wise, look like a sequential string.
1527 // Go to (4).
1528 // (7) Short external string or not a string? If yes, bail out to runtime.
1529 // (8) Sliced string. Replace subject with parent. Go to (1).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001530
Ben Murdoch097c5b22016-05-18 11:27:45 +01001531 Label check_underlying; // (1)
1532 Label seq_string; // (4)
1533 Label not_seq_nor_cons; // (5)
1534 Label external_string; // (6)
1535 Label not_long_external; // (7)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001536
Ben Murdoch097c5b22016-05-18 11:27:45 +01001537 __ bind(&check_underlying);
1538 __ ld(a2, FieldMemOperand(subject, HeapObject::kMapOffset));
1539 __ lbu(a0, FieldMemOperand(a2, Map::kInstanceTypeOffset));
1540
1541 // (1) Sequential string? If yes, go to (4).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001542 __ And(a1,
1543 a0,
1544 Operand(kIsNotStringMask |
1545 kStringRepresentationMask |
1546 kShortExternalStringMask));
1547 STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001548 __ Branch(&seq_string, eq, a1, Operand(zero_reg)); // Go to (4).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001549
Ben Murdoch097c5b22016-05-18 11:27:45 +01001550 // (2) Sequential or cons? If not, go to (5).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001551 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
1552 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
1553 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
1554 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001555 // Go to (5).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001556 __ Branch(&not_seq_nor_cons, ge, a1, Operand(kExternalStringTag));
1557
1558 // (3) Cons string. Check that it's flat.
1559 // Replace subject with first string and reload instance type.
1560 __ ld(a0, FieldMemOperand(subject, ConsString::kSecondOffset));
1561 __ LoadRoot(a1, Heap::kempty_stringRootIndex);
1562 __ Branch(&runtime, ne, a0, Operand(a1));
1563 __ ld(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001564 __ jmp(&check_underlying);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001565
Ben Murdoch097c5b22016-05-18 11:27:45 +01001566 // (4) Sequential string. Load regexp code according to encoding.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001567 __ bind(&seq_string);
1568 // subject: sequential subject string (or look-alike, external string)
1569 // a3: original subject string
1570 // Load previous index and check range before a3 is overwritten. We have to
1571 // use a3 instead of subject here because subject might have been only made
1572 // to look like a sequential string when it actually is an external string.
1573 __ ld(a1, MemOperand(sp, kPreviousIndexOffset));
1574 __ JumpIfNotSmi(a1, &runtime);
1575 __ ld(a3, FieldMemOperand(a3, String::kLengthOffset));
1576 __ Branch(&runtime, ls, a3, Operand(a1));
1577 __ SmiUntag(a1);
1578
1579 STATIC_ASSERT(kStringEncodingMask == 4);
1580 STATIC_ASSERT(kOneByteStringTag == 4);
1581 STATIC_ASSERT(kTwoByteStringTag == 0);
1582 __ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for one_byte.
1583 __ ld(t9, FieldMemOperand(regexp_data, JSRegExp::kDataOneByteCodeOffset));
1584 __ dsra(a3, a0, 2); // a3 is 1 for one_byte, 0 for UC16 (used below).
1585 __ ld(a5, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset));
1586 __ Movz(t9, a5, a0); // If UC16 (a0 is 0), replace t9 with the UC16 code.
1587
1588 // (E) Carry on. String handling is done.
1589 // t9: irregexp code
1590 // Check that the irregexp code has been generated for the actual string
1591 // encoding. If it has, the field contains a code object; otherwise it
1592 // contains a smi (code flushing support).
1593 __ JumpIfSmi(t9, &runtime);
1594
1595 // a1: previous index
1596 // a3: encoding of subject string (1 if one_byte, 0 if two_byte);
1597 // t9: code
1598 // subject: Subject string
1599 // regexp_data: RegExp data (FixedArray)
1600 // All checks done. Now push arguments for native regexp code.
1601 __ IncrementCounter(isolate()->counters()->regexp_entry_native(),
1602 1, a0, a2);
1603
1604 // Isolates: note we add an additional parameter here (isolate pointer).
1605 const int kRegExpExecuteArguments = 9;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001606 const int kParameterRegisters = 8;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001607 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
1608
1609 // Stack pointer now points to cell where return address is to be written.
1610 // Arguments are before that on the stack or in registers, meaning we
1611 // treat the return address as argument 5. Thus every argument after that
1612 // needs to be shifted back by 1. Since DirectCEntryStub will handle
1613 // allocating space for the C argument slots, we don't need to calculate
1614 // that into the argument positions on the stack. This is how the stack will
1615 // look (sp meaning the value of sp at this moment):
1616 // ABI n64:
1617 // [sp + 1] - Argument 9
1618 // [sp + 0] - saved ra
1619 // ABI O32:
1620 // [sp + 5] - Argument 9
1621 // [sp + 4] - Argument 8
1622 // [sp + 3] - Argument 7
1623 // [sp + 2] - Argument 6
1624 // [sp + 1] - Argument 5
1625 // [sp + 0] - saved ra
1626
Ben Murdoch097c5b22016-05-18 11:27:45 +01001627 // Argument 9: Pass current isolate address.
1628 __ li(a0, Operand(ExternalReference::isolate_address(isolate())));
1629 __ sd(a0, MemOperand(sp, 1 * kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001630
Ben Murdoch097c5b22016-05-18 11:27:45 +01001631 // Argument 8: Indicate that this is a direct call from JavaScript.
1632 __ li(a7, Operand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001633
Ben Murdoch097c5b22016-05-18 11:27:45 +01001634 // Argument 7: Start (high end) of backtracking stack memory area.
1635 __ li(a0, Operand(address_of_regexp_stack_memory_address));
1636 __ ld(a0, MemOperand(a0, 0));
1637 __ li(a2, Operand(address_of_regexp_stack_memory_size));
1638 __ ld(a2, MemOperand(a2, 0));
1639 __ daddu(a6, a0, a2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001640
Ben Murdoch097c5b22016-05-18 11:27:45 +01001641 // Argument 6: Set the number of capture registers to zero to force global
1642 // regexps to behave as non-global. This does not affect non-global regexps.
1643 __ mov(a5, zero_reg);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001644
Ben Murdoch097c5b22016-05-18 11:27:45 +01001645 // Argument 5: static offsets vector buffer.
1646 __ li(
1647 a4,
1648 Operand(ExternalReference::address_of_static_offsets_vector(isolate())));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001649
1650 // For arguments 4 and 3 get string length, calculate start of string data
1651 // and calculate the shift of the index (0 for one_byte and 1 for two byte).
1652 __ Daddu(t2, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag));
1653 __ Xor(a3, a3, Operand(1)); // 1 for 2-byte str, 0 for 1-byte.
1654 // Load the length from the original subject string from the previous stack
1655 // frame. Therefore we have to use fp, which points exactly to two pointer
1656 // sizes below the previous sp. (Because creating a new stack frame pushes
1657 // the previous fp onto the stack and moves up sp by 2 * kPointerSize.)
1658 __ ld(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
1659 // If slice offset is not 0, load the length from the original sliced string.
1660 // Argument 4, a3: End of string data
1661 // Argument 3, a2: Start of string data
1662 // Prepare start and end index of the input.
1663 __ dsllv(t1, t0, a3);
1664 __ daddu(t0, t2, t1);
1665 __ dsllv(t1, a1, a3);
1666 __ daddu(a2, t0, t1);
1667
1668 __ ld(t2, FieldMemOperand(subject, String::kLengthOffset));
1669
1670 __ SmiUntag(t2);
1671 __ dsllv(t1, t2, a3);
1672 __ daddu(a3, t0, t1);
1673 // Argument 2 (a1): Previous index.
1674 // Already there
1675
1676 // Argument 1 (a0): Subject string.
1677 __ mov(a0, subject);
1678
1679 // Locate the code entry and call it.
1680 __ Daddu(t9, t9, Operand(Code::kHeaderSize - kHeapObjectTag));
1681 DirectCEntryStub stub(isolate());
1682 stub.GenerateCall(masm, t9);
1683
1684 __ LeaveExitFrame(false, no_reg, true);
1685
1686 // v0: result
1687 // subject: subject string (callee saved)
1688 // regexp_data: RegExp data (callee saved)
1689 // last_match_info_elements: Last match info elements (callee saved)
1690 // Check the result.
1691 Label success;
1692 __ Branch(&success, eq, v0, Operand(1));
1693 // We expect exactly one result since we force the called regexp to behave
1694 // as non-global.
1695 Label failure;
1696 __ Branch(&failure, eq, v0, Operand(NativeRegExpMacroAssembler::FAILURE));
1697 // If not exception it can only be retry. Handle that in the runtime system.
1698 __ Branch(&runtime, ne, v0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
1699 // Result must now be exception. If there is no pending exception already, a
1700 // stack overflow (on the backtrack stack) was detected in RegExp code, but
1701 // the exception has not been created yet. Handle that in the runtime system.
1702 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
1703 __ li(a1, Operand(isolate()->factory()->the_hole_value()));
1704 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1705 isolate())));
1706 __ ld(v0, MemOperand(a2, 0));
1707 __ Branch(&runtime, eq, v0, Operand(a1));
1708
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001709 // For exception, throw the exception again.
1710 __ TailCallRuntime(Runtime::kRegExpExecReThrow);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001711
1712 __ bind(&failure);
1713 // For failure, return null.
1714 __ li(v0, Operand(isolate()->factory()->null_value()));
1715 __ DropAndRet(4);
1716
1717 // Process the result from the native regexp code.
1718 __ bind(&success);
1719
1720 __ lw(a1, UntagSmiFieldMemOperand(
1721 regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
1722 // Calculate number of capture registers (number_of_captures + 1) * 2.
1723 __ Daddu(a1, a1, Operand(1));
1724 __ dsll(a1, a1, 1); // Multiply by 2.
1725
1726 __ ld(a0, MemOperand(sp, kLastMatchInfoOffset));
1727 __ JumpIfSmi(a0, &runtime);
1728 __ GetObjectType(a0, a2, a2);
1729 __ Branch(&runtime, ne, a2, Operand(JS_ARRAY_TYPE));
1730 // Check that the JSArray is in fast case.
1731 __ ld(last_match_info_elements,
1732 FieldMemOperand(a0, JSArray::kElementsOffset));
1733 __ ld(a0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset));
1734 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
1735 __ Branch(&runtime, ne, a0, Operand(at));
1736 // Check that the last match info has space for the capture registers and the
1737 // additional information.
1738 __ ld(a0,
1739 FieldMemOperand(last_match_info_elements, FixedArray::kLengthOffset));
1740 __ Daddu(a2, a1, Operand(RegExpImpl::kLastMatchOverhead));
1741
1742 __ SmiUntag(at, a0);
1743 __ Branch(&runtime, gt, a2, Operand(at));
1744
1745 // a1: number of capture registers
1746 // subject: subject string
1747 // Store the capture count.
1748 __ SmiTag(a2, a1); // To smi.
1749 __ sd(a2, FieldMemOperand(last_match_info_elements,
1750 RegExpImpl::kLastCaptureCountOffset));
1751 // Store last subject and last input.
1752 __ sd(subject,
1753 FieldMemOperand(last_match_info_elements,
1754 RegExpImpl::kLastSubjectOffset));
1755 __ mov(a2, subject);
1756 __ RecordWriteField(last_match_info_elements,
1757 RegExpImpl::kLastSubjectOffset,
1758 subject,
1759 a7,
1760 kRAHasNotBeenSaved,
1761 kDontSaveFPRegs);
1762 __ mov(subject, a2);
1763 __ sd(subject,
1764 FieldMemOperand(last_match_info_elements,
1765 RegExpImpl::kLastInputOffset));
1766 __ RecordWriteField(last_match_info_elements,
1767 RegExpImpl::kLastInputOffset,
1768 subject,
1769 a7,
1770 kRAHasNotBeenSaved,
1771 kDontSaveFPRegs);
1772
1773 // Get the static offsets vector filled by the native regexp code.
1774 ExternalReference address_of_static_offsets_vector =
1775 ExternalReference::address_of_static_offsets_vector(isolate());
1776 __ li(a2, Operand(address_of_static_offsets_vector));
1777
1778 // a1: number of capture registers
1779 // a2: offsets vector
1780 Label next_capture, done;
1781 // Capture register counter starts from number of capture registers and
1782 // counts down until wrapping after zero.
1783 __ Daddu(a0,
1784 last_match_info_elements,
1785 Operand(RegExpImpl::kFirstCaptureOffset - kHeapObjectTag));
1786 __ bind(&next_capture);
1787 __ Dsubu(a1, a1, Operand(1));
1788 __ Branch(&done, lt, a1, Operand(zero_reg));
1789 // Read the value from the static offsets vector buffer.
1790 __ lw(a3, MemOperand(a2, 0));
1791 __ daddiu(a2, a2, kIntSize);
1792 // Store the smi value in the last match info.
1793 __ SmiTag(a3);
1794 __ sd(a3, MemOperand(a0, 0));
1795 __ Branch(&next_capture, USE_DELAY_SLOT);
1796 __ daddiu(a0, a0, kPointerSize); // In branch delay slot.
1797
1798 __ bind(&done);
1799
1800 // Return last match info.
1801 __ ld(v0, MemOperand(sp, kLastMatchInfoOffset));
1802 __ DropAndRet(4);
1803
1804 // Do the runtime call to execute the regexp.
1805 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001806 __ TailCallRuntime(Runtime::kRegExpExec);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001807
1808 // Deferred code for string handling.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001809 // (5) Long external string? If not, go to (7).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001810 __ bind(&not_seq_nor_cons);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001811 // Go to (7).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001812 __ Branch(&not_long_external, gt, a1, Operand(kExternalStringTag));
1813
Ben Murdoch097c5b22016-05-18 11:27:45 +01001814 // (6) External string. Make it, offset-wise, look like a sequential string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001815 __ bind(&external_string);
1816 __ ld(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
1817 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
1818 if (FLAG_debug_code) {
1819 // Assert that we do not have a cons or slice (indirect strings) here.
1820 // Sequential strings have already been ruled out.
1821 __ And(at, a0, Operand(kIsIndirectStringMask));
1822 __ Assert(eq,
1823 kExternalStringExpectedButNotFound,
1824 at,
1825 Operand(zero_reg));
1826 }
1827 __ ld(subject,
1828 FieldMemOperand(subject, ExternalString::kResourceDataOffset));
1829 // Move the pointer so that offset-wise, it looks like a sequential string.
1830 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
1831 __ Dsubu(subject,
1832 subject,
1833 SeqTwoByteString::kHeaderSize - kHeapObjectTag);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001834 __ jmp(&seq_string); // Go to (4).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001835
Ben Murdoch097c5b22016-05-18 11:27:45 +01001836 // (7) Short external string or not a string? If yes, bail out to runtime.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001837 __ bind(&not_long_external);
1838 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
1839 __ And(at, a1, Operand(kIsNotStringMask | kShortExternalStringMask));
1840 __ Branch(&runtime, ne, at, Operand(zero_reg));
1841
Ben Murdoch097c5b22016-05-18 11:27:45 +01001842 // (8) Sliced string. Replace subject with parent. Go to (4).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001843 // Load offset into t0 and replace subject string with parent.
1844 __ ld(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset));
1845 __ SmiUntag(t0);
1846 __ ld(subject, FieldMemOperand(subject, SlicedString::kParentOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001847 __ jmp(&check_underlying); // Go to (1).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001848#endif // V8_INTERPRETED_REGEXP
1849}
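
// Hedged sketch of the capture-count arithmetic used by the stub above. The
// stub checks number_of_captures <= size / 2 - 1, which is equivalent to
// (number_of_captures + 1) * 2 <= size; the vector size below is an assumed
// example value, not the real Isolate::kJSRegexpStaticOffsetsVectorSize.
constexpr int kSketchStaticVectorSize = 32;

inline bool SketchCapturesFit(int number_of_captures) {
  // Each capture needs two offset slots, plus two for the whole match.
  return number_of_captures <= kSketchStaticVectorSize / 2 - 1;
}

inline int SketchCaptureRegisterCount(int number_of_captures) {
  // This is the value computed into a1 before filling the last-match info.
  return (number_of_captures + 1) * 2;
}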
1850
1851
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001852static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
1853 // a0 : number of arguments to the construct function
1854 // a2 : feedback vector
1855 // a3 : slot in feedback vector (Smi)
1856 // a1 : the function to call
1857 FrameScope scope(masm, StackFrame::INTERNAL);
1858 const RegList kSavedRegs = 1 << 4 | // a0
1859 1 << 5 | // a1
1860 1 << 6 | // a2
1861 1 << 7; // a3
1862
1863
1864 // Number-of-arguments register must be smi-tagged to call out.
1865 __ SmiTag(a0);
1866 __ MultiPush(kSavedRegs);
1867
1868 __ CallStub(stub);
1869
1870 __ MultiPop(kSavedRegs);
1871 __ SmiUntag(a0);
1872}
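
// The RegList used above is a plain bitmask: bit n set means register n is
// saved around the stub call (a0..a3 are MIPS registers 4..7). A minimal
// sketch of that encoding, with assumed names:
#include <cstdint>

constexpr uint32_t kSketchSavedRegs = 1u << 4 |  // a0
                                      1u << 5 |  // a1
                                      1u << 6 |  // a2
                                      1u << 7;   // a3

inline bool SketchIsSaved(uint32_t reg_list, int reg_code) {
  return ((reg_list >> reg_code) & 1u) != 0;
}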
1873
1874
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001875static void GenerateRecordCallTarget(MacroAssembler* masm) {
1876 // Cache the called function in a feedback vector slot. Cache states
1877 // are uninitialized, monomorphic (indicated by a JSFunction), and
1878 // megamorphic.
1879 // a0 : number of arguments to the construct function
1880 // a1 : the function to call
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001881 // a2 : feedback vector
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001882 // a3 : slot in feedback vector (Smi)
1883 Label initialize, done, miss, megamorphic, not_array_function;
Ben Murdoch61f157c2016-09-16 13:49:30 +01001884 Label done_initialize_count, done_increment_count;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001885
1886 DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
1887 masm->isolate()->heap()->megamorphic_symbol());
1888 DCHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(masm->isolate()),
1889 masm->isolate()->heap()->uninitialized_symbol());
1890
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001891 // Load the cache state into a5.
1892 __ dsrl(a5, a3, 32 - kPointerSizeLog2);
1893 __ Daddu(a5, a2, Operand(a5));
1894 __ ld(a5, FieldMemOperand(a5, FixedArray::kHeaderSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001895
1896 // A monomorphic cache hit or an already megamorphic state: invoke the
1897 // function without changing the state.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001898 // We don't know if a5 is a WeakCell or a Symbol, but it's harmless to read at
1899 // this position in a symbol (see static asserts in type-feedback-vector.h).
1900 Label check_allocation_site;
1901 Register feedback_map = a6;
1902 Register weak_value = t0;
1903 __ ld(weak_value, FieldMemOperand(a5, WeakCell::kValueOffset));
Ben Murdoch61f157c2016-09-16 13:49:30 +01001904 __ Branch(&done_increment_count, eq, a1, Operand(weak_value));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001905 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
1906 __ Branch(&done, eq, a5, Operand(at));
1907 __ ld(feedback_map, FieldMemOperand(a5, HeapObject::kMapOffset));
1908 __ LoadRoot(at, Heap::kWeakCellMapRootIndex);
1909 __ Branch(&check_allocation_site, ne, feedback_map, Operand(at));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001910
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001911 // If the weak cell is cleared, we have a new chance to become monomorphic.
1912 __ JumpIfSmi(weak_value, &initialize);
1913 __ jmp(&megamorphic);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001914
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001915 __ bind(&check_allocation_site);
1916 // If we came here, we need to see if we are the array function.
1917 // If we didn't have a matching function, and we didn't find the megamorph
1918 // sentinel, then we have in the slot either some other function or an
1919 // AllocationSite.
1920 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
1921 __ Branch(&miss, ne, feedback_map, Operand(at));
1922
1923 // Make sure the function is the Array() function
1924 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a5);
1925 __ Branch(&megamorphic, ne, a1, Operand(a5));
Ben Murdoch61f157c2016-09-16 13:49:30 +01001926 __ jmp(&done_increment_count);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001927
1928 __ bind(&miss);
1929
1930 // A monomorphic miss (i.e, here the cache is not uninitialized) goes
1931 // megamorphic.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001932 __ LoadRoot(at, Heap::kuninitialized_symbolRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001933 __ Branch(&initialize, eq, a5, Operand(at));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001934 // MegamorphicSentinel is an immortal immovable object (undefined) so no
1935 // write-barrier is needed.
1936 __ bind(&megamorphic);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001937 __ dsrl(a5, a3, 32 - kPointerSizeLog2);
1938 __ Daddu(a5, a2, Operand(a5));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001939 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001940 __ sd(at, FieldMemOperand(a5, FixedArray::kHeaderSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001941 __ jmp(&done);
1942
1943 // An uninitialized cache is patched with the function.
1944 __ bind(&initialize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001945 // Make sure the function is the Array() function.
1946 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a5);
1947 __ Branch(&not_array_function, ne, a1, Operand(a5));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001948
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001949 // The target function is the Array constructor. Create an
1950 // AllocationSite if we don't already have one, and store it in the
1951 // slot.
1952 CreateAllocationSiteStub create_stub(masm->isolate());
1953 CallStubInRecordCallTarget(masm, &create_stub);
Ben Murdoch61f157c2016-09-16 13:49:30 +01001954 __ Branch(&done_initialize_count);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001955
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001956 __ bind(&not_array_function);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001957
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001958 CreateWeakCellStub weak_cell_stub(masm->isolate());
1959 CallStubInRecordCallTarget(masm, &weak_cell_stub);
Ben Murdoch61f157c2016-09-16 13:49:30 +01001960
1961 __ bind(&done_initialize_count);
1962 // Initialize the call counter.
1964 __ SmiScale(a4, a3, kPointerSizeLog2);
1965 __ Daddu(a4, a2, Operand(a4));
1966 __ li(a5, Operand(Smi::FromInt(1)));
1967 __ Branch(USE_DELAY_SLOT, &done);
1968 __ sd(a5, FieldMemOperand(a4, FixedArray::kHeaderSize + kPointerSize));
1969
1970 __ bind(&done_increment_count);
1971
1972 // Increment the call count for monomorphic function calls.
1973 __ SmiScale(a4, a3, kPointerSizeLog2);
1974 __ Daddu(a5, a2, Operand(a4));
1975 __ ld(a4, FieldMemOperand(a5, FixedArray::kHeaderSize + kPointerSize));
1976 __ Daddu(a4, a4, Operand(Smi::FromInt(1)));
1977 __ sd(a4, FieldMemOperand(a5, FixedArray::kHeaderSize + kPointerSize));
1978
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001979 __ bind(&done);
1980}
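
// A simplified model (assumption, not the real IC code) of the feedback-slot
// state machine implemented above: uninitialized slots become monomorphic on
// the first recorded target, a mismatch makes them megamorphic, and
// megamorphic is terminal.
enum class SketchFeedbackState { kUninitialized, kMonomorphic, kMegamorphic };

inline SketchFeedbackState SketchRecordCall(SketchFeedbackState state,
                                            bool same_target) {
  switch (state) {
    case SketchFeedbackState::kUninitialized:
      return SketchFeedbackState::kMonomorphic;  // Cache the first target.
    case SketchFeedbackState::kMonomorphic:
      return same_target ? SketchFeedbackState::kMonomorphic
                         : SketchFeedbackState::kMegamorphic;  // Go generic.
    case SketchFeedbackState::kMegamorphic:
    default:
      return SketchFeedbackState::kMegamorphic;  // Terminal state.
  }
}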
1981
1982
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001983void CallConstructStub::Generate(MacroAssembler* masm) {
1984 // a0 : number of arguments
1985 // a1 : the function to call
1986 // a2 : feedback vector
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001987 // a3 : slot in feedback vector (Smi, for RecordCallTarget)
1988
1989 Label non_function;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001990 // Check that the function is not a smi.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001991 __ JumpIfSmi(a1, &non_function);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001992 // Check that the function is a JSFunction.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001993 __ GetObjectType(a1, a5, a5);
1994 __ Branch(&non_function, ne, a5, Operand(JS_FUNCTION_TYPE));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001995
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001996 GenerateRecordCallTarget(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001997
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001998 __ dsrl(at, a3, 32 - kPointerSizeLog2);
1999 __ Daddu(a5, a2, at);
2000 Label feedback_register_initialized;
2001 // Put the AllocationSite from the feedback vector into a2, or undefined.
2002 __ ld(a2, FieldMemOperand(a5, FixedArray::kHeaderSize));
2003 __ ld(a5, FieldMemOperand(a2, AllocationSite::kMapOffset));
2004 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
2005 __ Branch(&feedback_register_initialized, eq, a5, Operand(at));
2006 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2007 __ bind(&feedback_register_initialized);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002008
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002009 __ AssertUndefinedOrAllocationSite(a2, a5);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002010
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002011 // Pass function as new target.
2012 __ mov(a3, a1);
2013
2014 // Tail call to the function-specific construct stub (still in the caller
2015 // context at this point).
2016 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2017 __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset));
2018 __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002019 __ Jump(at);
2020
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002021 __ bind(&non_function);
2022 __ mov(a3, a1);
2023 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002024}
2025
2026
2027// StringCharCodeAtGenerator.
2028void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
2029 DCHECK(!a4.is(index_));
2030 DCHECK(!a4.is(result_));
2031 DCHECK(!a4.is(object_));
2032
2033 // If the receiver is a smi, trigger the non-string case.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002034 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
2035 __ JumpIfSmi(object_, receiver_not_string_);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002036
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002037 // Fetch the instance type of the receiver into result register.
2038 __ ld(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
2039 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
2040 // If the receiver is not a string trigger the non-string case.
2041 __ And(a4, result_, Operand(kIsNotStringMask));
2042 __ Branch(receiver_not_string_, ne, a4, Operand(zero_reg));
2043 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002044
2045 // If the index is not a smi, trigger the non-smi case.
2046 __ JumpIfNotSmi(index_, &index_not_smi_);
2047
2048 __ bind(&got_smi_index_);
2049
2050 // Check for index out of range.
2051 __ ld(a4, FieldMemOperand(object_, String::kLengthOffset));
2052 __ Branch(index_out_of_range_, ls, a4, Operand(index_));
2053
2054 __ SmiUntag(index_);
2055
2056 StringCharLoadGenerator::Generate(masm,
2057 object_,
2058 index_,
2059 result_,
2060 &call_runtime_);
2061
2062 __ SmiTag(result_);
2063 __ bind(&exit_);
2064}
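
// Hedged sketch of the fast path above for a flat one-byte string: a smi
// index is range-checked against the length and the character is loaded.
// Real V8 dispatches over many string representations; this shows only the
// bounds check and load.
#include <cstdint>
#include <string>

inline bool SketchFastCharCodeAt(const std::string& subject, size_t index,
                                 uint16_t* out) {
  if (index >= subject.size()) return false;  // index_out_of_range_ path.
  *out = static_cast<uint8_t>(subject[index]);
  return true;
}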
2065
2066
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002067void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002068 // a1 - function
2069 // a3 - slot id
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002070 // a2 - vector
2071 // a4 - allocation site (loaded from vector[slot])
2072 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, at);
2073 __ Branch(miss, ne, a1, Operand(at));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002074
2075 __ li(a0, Operand(arg_count()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002076
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002077 // Increment the call count for monomorphic function calls.
2078 __ dsrl(t0, a3, 32 - kPointerSizeLog2);
2079 __ Daddu(a3, a2, Operand(t0));
2080 __ ld(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
Ben Murdoch61f157c2016-09-16 13:49:30 +01002081 __ Daddu(t0, t0, Operand(Smi::FromInt(1)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002082 __ sd(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002083
2084 __ mov(a2, a4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002085 __ mov(a3, a1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002086 ArrayConstructorStub stub(masm->isolate(), arg_count());
2087 __ TailCallStub(&stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002088}
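
// Hedged note on the call-count update above: the counter lives in the
// feedback vector as a Smi, so adding Smi::FromInt(1) to the raw word is the
// same as untag, increment, retag. On MIPS64 smis occupy the upper 32 bits,
// so the sketch below assumes a 32-bit smi shift.
#include <cstdint>

constexpr int kSketchSmiShift = 32;

inline int64_t SketchIncrementSmiCounter(int64_t raw_smi_word) {
  return raw_smi_word + (int64_t{1} << kSketchSmiShift);
}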
2089
2090
2091void CallICStub::Generate(MacroAssembler* masm) {
2092 // a1 - function
2093 // a3 - slot id (Smi)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002094 // a2 - vector
2095 Label extra_checks_or_miss, call, call_function;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002096 int argc = arg_count();
2097 ParameterCount actual(argc);
2098
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002099 // The checks. First, does a1 match the recorded monomorphic target?
2100 __ dsrl(a4, a3, 32 - kPointerSizeLog2);
2101 __ Daddu(a4, a2, Operand(a4));
2102 __ ld(a4, FieldMemOperand(a4, FixedArray::kHeaderSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002103
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002104 // We don't know that we have a weak cell. We might have a private symbol
2105 // or an AllocationSite, but the memory is safe to examine.
2106 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
2107 // FixedArray.
2108 // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
2109 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
2110 // computed, meaning that it can't appear to be a pointer. If the low bit is
2111 // 0, then hash is computed, but the 0 bit prevents the field from appearing
2112 // to be a pointer.
2113 STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
2114 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
2115 WeakCell::kValueOffset &&
2116 WeakCell::kValueOffset == Symbol::kHashFieldSlot);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002117
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002118 __ ld(a5, FieldMemOperand(a4, WeakCell::kValueOffset));
2119 __ Branch(&extra_checks_or_miss, ne, a1, Operand(a5));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002120
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002121 // The compare above could have been a SMI/SMI comparison. Guard against this
2122 // convincing us that we have a monomorphic JSFunction.
2123 __ JumpIfSmi(a1, &extra_checks_or_miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002124
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002125 // Increment the call count for monomorphic function calls.
2126 __ dsrl(t0, a3, 32 - kPointerSizeLog2);
2127 __ Daddu(a3, a2, Operand(t0));
2128 __ ld(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
Ben Murdoch61f157c2016-09-16 13:49:30 +01002129 __ Daddu(t0, t0, Operand(Smi::FromInt(1)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002130 __ sd(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002131
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002132 __ bind(&call_function);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002133 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
2134 tail_call_mode()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002135 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
2136 USE_DELAY_SLOT);
2137 __ li(a0, Operand(argc)); // In delay slot.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002138
2139 __ bind(&extra_checks_or_miss);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002140 Label uninitialized, miss, not_allocation_site;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002141
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002142 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002143 __ Branch(&call, eq, a4, Operand(at));
2144
2145 // Verify that a4 contains an AllocationSite
2146 __ ld(a5, FieldMemOperand(a4, HeapObject::kMapOffset));
2147 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
2148 __ Branch(&not_allocation_site, ne, a5, Operand(at));
2149
2150 HandleArrayCase(masm, &miss);
2151
2152 __ bind(&not_allocation_site);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002153
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002154 // The following cases attempt to handle MISS cases without going to the
2155 // runtime.
2156 if (FLAG_trace_ic) {
2157 __ Branch(&miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002158 }
2159
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002160 __ LoadRoot(at, Heap::kuninitialized_symbolRootIndex);
2161 __ Branch(&uninitialized, eq, a4, Operand(at));
2162
2163 // We are going megamorphic. If the feedback is a JSFunction, it is fine
2164 // to handle it here. More complex cases are dealt with in the runtime.
2165 __ AssertNotSmi(a4);
2166 __ GetObjectType(a4, a5, a5);
2167 __ Branch(&miss, ne, a5, Operand(JS_FUNCTION_TYPE));
2168 __ dsrl(a4, a3, 32 - kPointerSizeLog2);
2169 __ Daddu(a4, a2, Operand(a4));
2170 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
2171 __ sd(at, FieldMemOperand(a4, FixedArray::kHeaderSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002172
2173 __ bind(&call);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002174 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002175 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
2176 USE_DELAY_SLOT);
2177 __ li(a0, Operand(argc)); // In delay slot.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002178
2179 __ bind(&uninitialized);
2180
2181 // We are going monomorphic, provided we actually have a JSFunction.
2182 __ JumpIfSmi(a1, &miss);
2183
2184 // Goto miss case if we do not have a function.
2185 __ GetObjectType(a1, a4, a4);
2186 __ Branch(&miss, ne, a4, Operand(JS_FUNCTION_TYPE));
2187
2188 // Make sure the function is not the Array() function, which requires special
2189 // behavior on MISS.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002190 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a4);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002191 __ Branch(&miss, eq, a1, Operand(a4));
2192
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002193 // Make sure the function belongs to the same native context.
2194 __ ld(t0, FieldMemOperand(a1, JSFunction::kContextOffset));
2195 __ ld(t0, ContextMemOperand(t0, Context::NATIVE_CONTEXT_INDEX));
2196 __ ld(t1, NativeContextMemOperand());
2197 __ Branch(&miss, ne, t0, Operand(t1));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002198
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002199 // Initialize the call counter.
2200 __ dsrl(at, a3, 32 - kPointerSizeLog2);
2201 __ Daddu(at, a2, Operand(at));
Ben Murdoch61f157c2016-09-16 13:49:30 +01002202 __ li(t0, Operand(Smi::FromInt(1)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002203 __ sd(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002204
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002205 // Store the function. Use a stub since we need a frame for allocation.
2206 // a2 - vector
2207 // a3 - slot
2208 // a1 - function
2209 {
2210 FrameScope scope(masm, StackFrame::INTERNAL);
2211 CreateWeakCellStub create_stub(masm->isolate());
2212 __ Push(a1);
2213 __ CallStub(&create_stub);
2214 __ Pop(a1);
2215 }
2216
2217 __ Branch(&call_function);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002218
2219 // We are here because tracing is on or we encountered a MISS case we can't
2220 // handle here.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002221 __ bind(&miss);
2222 GenerateMiss(masm);
2223
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002224 __ Branch(&call);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002225}
2226
2227
2228void CallICStub::GenerateMiss(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002229 FrameScope scope(masm, StackFrame::INTERNAL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002230
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002231 // Push the receiver and the function and feedback info.
2232 __ Push(a1, a2, a3);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002233
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002234 // Call the entry.
2235 __ CallRuntime(Runtime::kCallIC_Miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002236
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002237 // Move result to a1 and exit the internal frame.
2238 __ mov(a1, v0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002239}
2240
2241
2242void StringCharCodeAtGenerator::GenerateSlow(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002243 MacroAssembler* masm, EmbedMode embed_mode,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002244 const RuntimeCallHelper& call_helper) {
2245 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
2246
2247 // Index is not a smi.
2248 __ bind(&index_not_smi_);
2249 // If index is a heap number, try converting it to an integer.
2250 __ CheckMap(index_,
2251 result_,
2252 Heap::kHeapNumberMapRootIndex,
2253 index_not_number_,
2254 DONT_DO_SMI_CHECK);
2255 call_helper.BeforeCall(masm);
2256 // Consumed by runtime conversion function:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002257 if (embed_mode == PART_OF_IC_HANDLER) {
2258 __ Push(LoadWithVectorDescriptor::VectorRegister(),
2259 LoadWithVectorDescriptor::SlotRegister(), object_, index_);
2260 } else {
2261 __ Push(object_, index_);
2262 }
Ben Murdoch61f157c2016-09-16 13:49:30 +01002263 __ CallRuntime(Runtime::kNumberToSmi);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002264
2265 // Save the conversion result before the pop instructions below
2266 // have a chance to overwrite it.
2267
2268 __ Move(index_, v0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002269 if (embed_mode == PART_OF_IC_HANDLER) {
2270 __ Pop(LoadWithVectorDescriptor::VectorRegister(),
2271 LoadWithVectorDescriptor::SlotRegister(), object_);
2272 } else {
2273 __ pop(object_);
2274 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002275 // Reload the instance type.
2276 __ ld(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
2277 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
2278 call_helper.AfterCall(masm);
2279 // If index is still not a smi, it must be out of range.
2280 __ JumpIfNotSmi(index_, index_out_of_range_);
2281 // Otherwise, return to the fast path.
2282 __ Branch(&got_smi_index_);
2283
2284 // Call runtime. We get here when the receiver is a string and the
2285 // index is a number, but the code for getting the actual character
2286 // is too complex (e.g., when the string needs to be flattened).
2287 __ bind(&call_runtime_);
2288 call_helper.BeforeCall(masm);
2289 __ SmiTag(index_);
2290 __ Push(object_, index_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002291 __ CallRuntime(Runtime::kStringCharCodeAtRT);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002292
2293 __ Move(result_, v0);
2294
2295 call_helper.AfterCall(masm);
2296 __ jmp(&exit_);
2297
2298 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
2299}
2300
2301
2302// -------------------------------------------------------------------------
2303// StringCharFromCodeGenerator
2304
2305void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
2306 // Fast case of Heap::LookupSingleCharacterStringFromCode.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002307 __ JumpIfNotSmi(code_, &slow_case_);
2308 __ Branch(&slow_case_, hi, code_,
2309 Operand(Smi::FromInt(String::kMaxOneByteCharCode)));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002310
2311 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
2312 // At this point code register contains smi tagged one_byte char code.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002313 __ SmiScale(at, code_, kPointerSizeLog2);
2314 __ Daddu(result_, result_, at);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002315 __ ld(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002316 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2317 __ Branch(&slow_case_, eq, result_, Operand(at));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002318 __ bind(&exit_);
2319}
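
// Illustrative sketch of the single-character string cache consulted above:
// char codes up to String::kMaxOneByteCharCode index a cache table, and a
// miss falls back to the slow case. The array-of-strings layout below is an
// assumption; the real cache is a FixedArray on the heap with undefined
// entries marking misses.
#include <array>
#include <string>

constexpr int kSketchMaxOneByteCharCode = 0xFF;

inline bool SketchLookupSingleCharString(
    const std::array<std::string, kSketchMaxOneByteCharCode + 1>& cache,
    int code, std::string* out) {
  if (code < 0 || code > kSketchMaxOneByteCharCode) return false;  // slow_case_
  if (cache[code].empty()) return false;  // Uncached entry: slow case.
  *out = cache[code];
  return true;
}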
2320
2321
2322void StringCharFromCodeGenerator::GenerateSlow(
2323 MacroAssembler* masm,
2324 const RuntimeCallHelper& call_helper) {
2325 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
2326
2327 __ bind(&slow_case_);
2328 call_helper.BeforeCall(masm);
2329 __ push(code_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002330 __ CallRuntime(Runtime::kStringCharFromCode);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002331 __ Move(result_, v0);
2332
2333 call_helper.AfterCall(masm);
2334 __ Branch(&exit_);
2335
2336 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
2337}
2338
2339
2340enum CopyCharactersFlags { COPY_ONE_BYTE = 1, DEST_ALWAYS_ALIGNED = 2 };
2341
2342
2343void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
2344 Register dest,
2345 Register src,
2346 Register count,
2347 Register scratch,
2348 String::Encoding encoding) {
2349 if (FLAG_debug_code) {
2350 // Check that destination is word aligned.
2351 __ And(scratch, dest, Operand(kPointerAlignmentMask));
2352 __ Check(eq,
2353 kDestinationOfCopyNotAligned,
2354 scratch,
2355 Operand(zero_reg));
2356 }
2357
2358 // Assumes word reads and writes are little endian.
2359 // Nothing to do for zero characters.
2360 Label done;
2361
2362 if (encoding == String::TWO_BYTE_ENCODING) {
2363 __ Daddu(count, count, count);
2364 }
2365
2366 Register limit = count; // Read until dest equals this.
2367 __ Daddu(limit, dest, Operand(count));
2368
2369 Label loop_entry, loop;
2370 // Copy bytes from src to dest until dest hits limit.
2371 __ Branch(&loop_entry);
2372 __ bind(&loop);
2373 __ lbu(scratch, MemOperand(src));
2374 __ daddiu(src, src, 1);
2375 __ sb(scratch, MemOperand(dest));
2376 __ daddiu(dest, dest, 1);
2377 __ bind(&loop_entry);
2378 __ Branch(&loop, lt, dest, Operand(limit));
2379
2380 __ bind(&done);
2381}
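
// Plain-C++ equivalent (a hedged sketch) of the copy loop above: for
// two-byte strings the character count is doubled into a byte count first,
// then bytes are copied until dest reaches the limit.
#include <cstddef>
#include <cstdint>

inline void SketchCopyCharacters(uint8_t* dest, const uint8_t* src,
                                 size_t count, bool two_byte) {
  if (two_byte) count *= 2;  // Count is in characters; convert to bytes.
  const uint8_t* limit = dest + count;
  while (dest < limit) {
    *dest++ = *src++;
  }
}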
2382
2383
2384void SubStringStub::Generate(MacroAssembler* masm) {
2385 Label runtime;
2386 // Stack frame on entry.
2387 // ra: return address
2388 // sp[0]: to
2389 // sp[8]: from
2390 // sp[16]: string
2391
2392 // This stub is called from the native-call %_SubString(...), so
2393 // nothing can be assumed about the arguments. It is tested that:
2394 // "string" is a sequential string,
2395 // both "from" and "to" are smis, and
2396 // 0 <= from <= to <= string.length.
2397 // If any of these assumptions fail, we call the runtime system.
2398
2399 const int kToOffset = 0 * kPointerSize;
2400 const int kFromOffset = 1 * kPointerSize;
2401 const int kStringOffset = 2 * kPointerSize;
2402
2403 __ ld(a2, MemOperand(sp, kToOffset));
2404 __ ld(a3, MemOperand(sp, kFromOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002405
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002406 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002407
2408 // Utilize delay slots. SmiUntag doesn't emit a jump, so everything else
2409 // is safe in this case.
2410 __ JumpIfNotSmi(a2, &runtime);
2411 __ JumpIfNotSmi(a3, &runtime);
2412 // Both a2 and a3 are smis; untag them to integers.
2413
2414 __ SmiUntag(a2, a2);
2415 __ SmiUntag(a3, a3);
2416 __ Branch(&runtime, lt, a3, Operand(zero_reg)); // From < 0.
2417
2418 __ Branch(&runtime, gt, a3, Operand(a2)); // Fail if from > to.
2419 __ Dsubu(a2, a2, a3);
2420
2421 // Make sure first argument is a string.
2422 __ ld(v0, MemOperand(sp, kStringOffset));
2423 __ JumpIfSmi(v0, &runtime);
2424 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2425 __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
2426 __ And(a4, a1, Operand(kIsNotStringMask));
2427
2428 __ Branch(&runtime, ne, a4, Operand(zero_reg));
2429
2430 Label single_char;
2431 __ Branch(&single_char, eq, a2, Operand(1));
2432
2433 // Shortcut for the case of a trivial substring.
2434 Label return_v0;
2435 // v0: original string
2436 // a2: result string length
2437 __ ld(a4, FieldMemOperand(v0, String::kLengthOffset));
2438 __ SmiUntag(a4);
2439 // Return original string.
2440 __ Branch(&return_v0, eq, a2, Operand(a4));
2441 // Longer than original string's length or negative: unsafe arguments.
2442 __ Branch(&runtime, hi, a2, Operand(a4));
2443 // Shorter than original string's length: an actual substring.
2444
2445 // Deal with different string types: update the index if necessary
2446 // and put the underlying string into a5.
2447 // v0: original string
2448 // a1: instance type
2449 // a2: length
2450 // a3: from index (untagged)
2451 Label underlying_unpacked, sliced_string, seq_or_external_string;
2452 // If the string is not indirect, it can only be sequential or external.
2453 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
2454 STATIC_ASSERT(kIsIndirectStringMask != 0);
2455 __ And(a4, a1, Operand(kIsIndirectStringMask));
2456 __ Branch(USE_DELAY_SLOT, &seq_or_external_string, eq, a4, Operand(zero_reg));
2457 // a4 is used as a scratch register and can be overwritten in either case.
2458 __ And(a4, a1, Operand(kSlicedNotConsMask));
2459 __ Branch(&sliced_string, ne, a4, Operand(zero_reg));
2460 // Cons string. Check whether it is flat, then fetch first part.
2461 __ ld(a5, FieldMemOperand(v0, ConsString::kSecondOffset));
2462 __ LoadRoot(a4, Heap::kempty_stringRootIndex);
2463 __ Branch(&runtime, ne, a5, Operand(a4));
2464 __ ld(a5, FieldMemOperand(v0, ConsString::kFirstOffset));
2465 // Update instance type.
2466 __ ld(a1, FieldMemOperand(a5, HeapObject::kMapOffset));
2467 __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
2468 __ jmp(&underlying_unpacked);
2469
2470 __ bind(&sliced_string);
2471 // Sliced string. Fetch parent and correct start index by offset.
2472 __ ld(a5, FieldMemOperand(v0, SlicedString::kParentOffset));
2473 __ ld(a4, FieldMemOperand(v0, SlicedString::kOffsetOffset));
2474 __ SmiUntag(a4); // Add offset to index.
2475 __ Daddu(a3, a3, a4);
2476 // Update instance type.
2477 __ ld(a1, FieldMemOperand(a5, HeapObject::kMapOffset));
2478 __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
2479 __ jmp(&underlying_unpacked);
2480
2481 __ bind(&seq_or_external_string);
2482 // Sequential or external string. Just move string to the expected register.
2483 __ mov(a5, v0);
2484
2485 __ bind(&underlying_unpacked);
2486
2487 if (FLAG_string_slices) {
2488 Label copy_routine;
2489 // a5: underlying subject string
2490 // a1: instance type of underlying subject string
2491 // a2: length
2492 // a3: adjusted start index (untagged)
2493 // Short slice. Copy instead of slicing.
2494 __ Branch(&copy_routine, lt, a2, Operand(SlicedString::kMinLength));
2495 // Allocate new sliced string. At this point we do not reload the instance
2496 // type including the string encoding because we simply rely on the info
2497 // provided by the original string. It does not matter if the original
2498 // string's encoding is wrong because we always have to recheck encoding of
2499 // the newly created string's parent anyway due to externalized strings.
2500 Label two_byte_slice, set_slice_header;
2501 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
2502 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
2503 __ And(a4, a1, Operand(kStringEncodingMask));
2504 __ Branch(&two_byte_slice, eq, a4, Operand(zero_reg));
2505 __ AllocateOneByteSlicedString(v0, a2, a6, a7, &runtime);
2506 __ jmp(&set_slice_header);
2507 __ bind(&two_byte_slice);
2508 __ AllocateTwoByteSlicedString(v0, a2, a6, a7, &runtime);
2509 __ bind(&set_slice_header);
2510 __ SmiTag(a3);
2511 __ sd(a5, FieldMemOperand(v0, SlicedString::kParentOffset));
2512 __ sd(a3, FieldMemOperand(v0, SlicedString::kOffsetOffset));
2513 __ jmp(&return_v0);
2514
2515 __ bind(&copy_routine);
2516 }
2517
2518 // a5: underlying subject string
2519 // a1: instance type of underlying subject string
2520 // a2: length
2521 // a3: adjusted start index (untagged)
2522 Label two_byte_sequential, sequential_string, allocate_result;
2523 STATIC_ASSERT(kExternalStringTag != 0);
2524 STATIC_ASSERT(kSeqStringTag == 0);
2525 __ And(a4, a1, Operand(kExternalStringTag));
2526 __ Branch(&sequential_string, eq, a4, Operand(zero_reg));
2527
2528 // Handle external string.
2529 // Rule out short external strings.
2530 STATIC_ASSERT(kShortExternalStringTag != 0);
2531 __ And(a4, a1, Operand(kShortExternalStringTag));
2532 __ Branch(&runtime, ne, a4, Operand(zero_reg));
2533 __ ld(a5, FieldMemOperand(a5, ExternalString::kResourceDataOffset));
2534 // a5 already points to the first character of underlying string.
2535 __ jmp(&allocate_result);
2536
2537 __ bind(&sequential_string);
2538 // Locate first character of underlying subject string.
2539 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
2540 __ Daddu(a5, a5, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
2541
2542 __ bind(&allocate_result);
2543 // Sequential string. Allocate the result according to the encoding.
2544 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
2545 __ And(a4, a1, Operand(kStringEncodingMask));
2546 __ Branch(&two_byte_sequential, eq, a4, Operand(zero_reg));
2547
2548 // Allocate and copy the resulting one_byte string.
2549 __ AllocateOneByteString(v0, a2, a4, a6, a7, &runtime);
2550
2551 // Locate first character of substring to copy.
2552 __ Daddu(a5, a5, a3);
2553
2554 // Locate first character of result.
2555 __ Daddu(a1, v0, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
2556
2557 // v0: result string
2558 // a1: first character of result string
2559 // a2: result string length
2560 // a5: first character of substring to copy
2561 STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
2562 StringHelper::GenerateCopyCharacters(
2563 masm, a1, a5, a2, a3, String::ONE_BYTE_ENCODING);
2564 __ jmp(&return_v0);
2565
2566 // Allocate and copy the resulting two-byte string.
2567 __ bind(&two_byte_sequential);
2568 __ AllocateTwoByteString(v0, a2, a4, a6, a7, &runtime);
2569
2570 // Locate first character of substring to copy.
2571 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002572 __ Dlsa(a5, a5, a3, 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002573 // Locate first character of result.
2574 __ Daddu(a1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
2575
2576 // v0: result string.
2577 // a1: first character of result.
2578 // a2: result length.
2579 // a5: first character of substring to copy.
2580 STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
2581 StringHelper::GenerateCopyCharacters(
2582 masm, a1, a5, a2, a3, String::TWO_BYTE_ENCODING);
2583
2584 __ bind(&return_v0);
2585 Counters* counters = isolate()->counters();
2586 __ IncrementCounter(counters->sub_string_native(), 1, a3, a4);
2587 __ DropAndRet(3);
2588
2589 // Just jump to runtime to create the sub string.
2590 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002591 __ TailCallRuntime(Runtime::kSubString);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002592
2593 __ bind(&single_char);
2594 // v0: original string
2595 // a1: instance type
2596 // a2: length
2597 // a3: from index (untagged)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002598 __ SmiTag(a3);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002599 StringCharAtGenerator generator(v0, a3, a2, v0, &runtime, &runtime, &runtime,
Ben Murdoch61f157c2016-09-16 13:49:30 +01002600 RECEIVER_IS_STRING);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002601 generator.GenerateFast(masm);
2602 __ DropAndRet(3);
2603 generator.SkipSlow(masm, &runtime);
2604}
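
// Sketch of the slice-versus-copy policy applied above: results shorter than
// SlicedString::kMinLength are copied into a fresh sequential string, longer
// ones share the parent via a SlicedString. The threshold value below is an
// assumed example, not the real constant.
constexpr int kSketchMinSliceLength = 13;

inline bool SketchShouldMakeSlice(int result_length) {
  return result_length >= kSketchMinSliceLength;
}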

void ToStringStub::Generate(MacroAssembler* masm) {
  // The ToString stub takes one argument in a0.
  Label is_number;
  __ JumpIfSmi(a0, &is_number);

  Label not_string;
  __ GetObjectType(a0, a1, a1);
  // a0: receiver
  // a1: receiver instance type
  __ Branch(&not_string, ge, a1, Operand(FIRST_NONSTRING_TYPE));
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
  __ bind(&not_string);

  Label not_heap_number;
  __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
  __ bind(&is_number);
  NumberToStringStub stub(isolate());
  __ TailCallStub(&stub);
  __ bind(&not_heap_number);

  Label not_oddball;
  __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
  __ Ret(USE_DELAY_SLOT);
  __ ld(v0, FieldMemOperand(a0, Oddball::kToStringOffset));
  __ bind(&not_oddball);

  __ push(a0);  // Push argument.
  __ TailCallRuntime(Runtime::kToString);
}
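

// Editor's note: an illustrative sketch, not V8 API. It restates the
// dispatch order ToStringStub implements; the enum and function names here
// are hypothetical.
namespace {

enum ToStringAction { kReturnReceiver, kNumberToString, kOddballToString,
                      kRuntimeToString };

ToStringAction ClassifyForToString(bool is_smi, int instance_type) {
  if (is_smi) return kNumberToString;  // __ JumpIfSmi(a0, &is_number)
  if (instance_type < FIRST_NONSTRING_TYPE) return kReturnReceiver;
  if (instance_type == HEAP_NUMBER_TYPE) return kNumberToString;
  if (instance_type == ODDBALL_TYPE) return kOddballToString;
  return kRuntimeToString;  // Runtime::kToString
}

}  // namespace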


void ToNameStub::Generate(MacroAssembler* masm) {
  // The ToName stub takes one argument in a0.
  Label is_number;
  __ JumpIfSmi(a0, &is_number);

  Label not_name;
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
  __ GetObjectType(a0, a1, a1);
  // a0: receiver
  // a1: receiver instance type
  __ Branch(&not_name, gt, a1, Operand(LAST_NAME_TYPE));
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
  __ bind(&not_name);

  Label not_heap_number;
  __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
  __ bind(&is_number);
  NumberToStringStub stub(isolate());
  __ TailCallStub(&stub);
  __ bind(&not_heap_number);

  Label not_oddball;
  __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
  __ Ret(USE_DELAY_SLOT);
  __ ld(v0, FieldMemOperand(a0, Oddball::kToStringOffset));
  __ bind(&not_oddball);

  __ push(a0);  // Push argument.
  __ TailCallRuntime(Runtime::kToName);
}


void StringHelper::GenerateFlatOneByteStringEquals(
    MacroAssembler* masm, Register left, Register right, Register scratch1,
    Register scratch2, Register scratch3) {
  Register length = scratch1;

  // Compare lengths.
  Label strings_not_equal, check_zero_length;
  __ ld(length, FieldMemOperand(left, String::kLengthOffset));
  __ ld(scratch2, FieldMemOperand(right, String::kLengthOffset));
  __ Branch(&check_zero_length, eq, length, Operand(scratch2));
  __ bind(&strings_not_equal);
  // li cannot go in the delay slot: it expands to multiple instructions.
  __ li(v0, Operand(Smi::FromInt(NOT_EQUAL)));
  __ Ret();

  // Check if the length is zero.
  Label compare_chars;
  __ bind(&check_zero_length);
  STATIC_ASSERT(kSmiTag == 0);
  __ Branch(&compare_chars, ne, length, Operand(zero_reg));
  DCHECK(is_int16((intptr_t)Smi::FromInt(EQUAL)));
  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(Smi::FromInt(EQUAL)));

  // Compare characters.
  __ bind(&compare_chars);

  GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2, scratch3,
                                  v0, &strings_not_equal);

  // Characters are equal.
  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(Smi::FromInt(EQUAL)));
}


void StringHelper::GenerateCompareFlatOneByteStrings(
    MacroAssembler* masm, Register left, Register right, Register scratch1,
    Register scratch2, Register scratch3, Register scratch4) {
  Label result_not_equal, compare_lengths;
  // Find minimum length and length difference.
  __ ld(scratch1, FieldMemOperand(left, String::kLengthOffset));
  __ ld(scratch2, FieldMemOperand(right, String::kLengthOffset));
  __ Dsubu(scratch3, scratch1, Operand(scratch2));
  Register length_delta = scratch3;
  __ slt(scratch4, scratch2, scratch1);
  __ Movn(scratch1, scratch2, scratch4);
  Register min_length = scratch1;
  STATIC_ASSERT(kSmiTag == 0);
  __ Branch(&compare_lengths, eq, min_length, Operand(zero_reg));

  // Compare loop.
  GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
                                  scratch4, v0, &result_not_equal);

  // Compare lengths - strings up to min-length are equal.
  __ bind(&compare_lengths);
  DCHECK(Smi::FromInt(EQUAL) == static_cast<Smi*>(0));
  // Use length_delta as result if it's zero.
  __ mov(scratch2, length_delta);
  __ mov(scratch4, zero_reg);
  __ mov(v0, zero_reg);

  __ bind(&result_not_equal);
  // Conditionally update the result based either on length_delta or
  // the last comparison performed in the loop above.
  Label ret;
  __ Branch(&ret, eq, scratch2, Operand(scratch4));
  __ li(v0, Operand(Smi::FromInt(GREATER)));
  __ Branch(&ret, gt, scratch2, Operand(scratch4));
  __ li(v0, Operand(Smi::FromInt(LESS)));
  __ bind(&ret);
  __ Ret();
}
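

// Editor's note: an illustrative sketch, not part of the stub. It restates
// the comparison scheme above in plain C++: compare up to the shorter
// length, then let the length difference break the tie.
namespace {

int CompareFlatOneByteSketch(const unsigned char* left, int left_len,
                             const unsigned char* right, int right_len) {
  int min_len = left_len < right_len ? left_len : right_len;
  for (int i = 0; i < min_len; i++) {
    if (left[i] != right[i]) return left[i] < right[i] ? LESS : GREATER;
  }
  // Strings agree up to min_len, so length_delta decides the result.
  if (left_len == right_len) return EQUAL;
  return left_len < right_len ? LESS : GREATER;
}

}  // namespace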


void StringHelper::GenerateOneByteCharsCompareLoop(
    MacroAssembler* masm, Register left, Register right, Register length,
    Register scratch1, Register scratch2, Register scratch3,
    Label* chars_not_equal) {
  // Change index to run from -length to -1 by adding length to string
  // start. This means that loop ends when index reaches zero, which
  // doesn't need an additional compare.
  __ SmiUntag(length);
  __ Daddu(scratch1, length,
           Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ Daddu(left, left, Operand(scratch1));
  __ Daddu(right, right, Operand(scratch1));
  __ Dsubu(length, zero_reg, length);
  Register index = length;  // index = -length;

  // Compare loop.
  Label loop;
  __ bind(&loop);
  __ Daddu(scratch3, left, index);
  __ lbu(scratch1, MemOperand(scratch3));
  __ Daddu(scratch3, right, index);
  __ lbu(scratch2, MemOperand(scratch3));
  __ Branch(chars_not_equal, ne, scratch1, Operand(scratch2));
  __ Daddu(index, index, 1);
  __ Branch(&loop, ne, index, Operand(zero_reg));
}
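

// Editor's note: an illustrative sketch of the indexing trick above. Biasing
// both base pointers by the length lets one increment serve as both loop
// counter and termination test, saving a compare per iteration.
namespace {

bool OneByteCharsEqualSketch(const unsigned char* left,
                             const unsigned char* right, int length) {
  left += length;   // Point just past the end of each string...
  right += length;
  for (int index = -length; index != 0; index++) {  // ...and index backward.
    if (left[index] != right[index]) return false;  // chars_not_equal
  }
  return true;
}

}  // namespace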


void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a1 : left
  //  -- a0 : right
  //  -- ra : return address
  // -----------------------------------

  // Load a2 with the allocation site. We stick an undefined dummy value here
  // and replace it with the real allocation site later when we instantiate
  // this stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
  __ li(a2, isolate()->factory()->undefined_value());

  // Make sure that we actually patched the allocation site.
  if (FLAG_debug_code) {
    __ And(at, a2, Operand(kSmiTagMask));
    __ Assert(ne, kExpectedAllocationSite, at, Operand(zero_reg));
    __ ld(a4, FieldMemOperand(a2, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
    __ Assert(eq, kExpectedAllocationSite, a4, Operand(at));
  }

  // Tail call into the stub that handles binary operations with allocation
  // sites.
  BinaryOpWithAllocationSiteStub stub(isolate(), state());
  __ TailCallStub(&stub);
}


void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
  DCHECK_EQ(CompareICState::BOOLEAN, state());
  Label miss;

  __ CheckMap(a1, a2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
  __ CheckMap(a0, a3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
  if (!Token::IsEqualityOp(op())) {
    __ ld(a1, FieldMemOperand(a1, Oddball::kToNumberOffset));
    __ AssertSmi(a1);
    __ ld(a0, FieldMemOperand(a0, Oddball::kToNumberOffset));
    __ AssertSmi(a0);
  }
  __ Ret(USE_DELAY_SLOT);
  __ Dsubu(v0, a1, a0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateSmis(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::SMI);
  Label miss;
  __ Or(a2, a1, a0);
  __ JumpIfNotSmi(a2, &miss);

  if (GetCondition() == eq) {
    // For equality we do not care about the sign of the result.
    __ Ret(USE_DELAY_SLOT);
    __ Dsubu(v0, a0, a1);
  } else {
    // Untag before subtracting to avoid handling overflow.
    __ SmiUntag(a1);
    __ SmiUntag(a0);
    __ Ret(USE_DELAY_SLOT);
    __ Dsubu(v0, a1, a0);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::NUMBER);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;

  if (left() == CompareICState::SMI) {
    __ JumpIfNotSmi(a1, &miss);
  }
  if (right() == CompareICState::SMI) {
    __ JumpIfNotSmi(a0, &miss);
  }

  // Inline the double comparison and fall back to the general compare
  // stub if NaN is involved.
  // Load left and right operand.
  Label done, left, left_smi, right_smi;
  __ JumpIfSmi(a0, &right_smi);
  __ CheckMap(a0, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1,
              DONT_DO_SMI_CHECK);
  __ Dsubu(a2, a0, Operand(kHeapObjectTag));
  __ ldc1(f2, MemOperand(a2, HeapNumber::kValueOffset));
  __ Branch(&left);
  __ bind(&right_smi);
  __ SmiUntag(a2, a0);  // Can't clobber a0 yet.
  FPURegister single_scratch = f6;
  __ mtc1(a2, single_scratch);
  __ cvt_d_w(f2, single_scratch);

  __ bind(&left);
  __ JumpIfSmi(a1, &left_smi);
  __ CheckMap(a1, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2,
              DONT_DO_SMI_CHECK);
  __ Dsubu(a2, a1, Operand(kHeapObjectTag));
  __ ldc1(f0, MemOperand(a2, HeapNumber::kValueOffset));
  __ Branch(&done);
  __ bind(&left_smi);
  __ SmiUntag(a2, a1);  // Can't clobber a1 yet.
  single_scratch = f8;
  __ mtc1(a2, single_scratch);
  __ cvt_d_w(f0, single_scratch);

  __ bind(&done);

  // Return a result of -1, 0, or 1, or use CompareStub for NaNs.
  Label fpu_eq, fpu_lt;
  // Test if equal, and also handle the unordered/NaN case.
  __ BranchF(&fpu_eq, &unordered, eq, f0, f2);

  // Test if less (unordered case is already handled).
  __ BranchF(&fpu_lt, NULL, lt, f0, f2);

  // Otherwise it's greater, so just fall through, and return.
  DCHECK(is_int16(GREATER) && is_int16(EQUAL) && is_int16(LESS));
  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(GREATER));

  __ bind(&fpu_eq);
  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(EQUAL));

  __ bind(&fpu_lt);
  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(LESS));

  __ bind(&unordered);
  __ bind(&generic_stub);
  CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
                     CompareICState::GENERIC, CompareICState::GENERIC);
  __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(&miss, ne, a0, Operand(at));
    __ JumpIfSmi(a1, &unordered);
    __ GetObjectType(a1, a2, a2);
    __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE));
    __ jmp(&unordered);
  }

  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(&unordered, eq, a1, Operand(at));
  }

  __ bind(&miss);
  GenerateMiss(masm);
}
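

// Editor's note: an illustrative sketch, not part of the stub. The inlined
// number path only decides ordered doubles; if either operand is NaN it
// bails out, mirroring the jump to &unordered and the generic stub. The
// out-parameter is hypothetical.
namespace {

int InlineCompareDoublesSketch(double lhs, double rhs, bool* bailed_out) {
  *bailed_out = (lhs != lhs) || (rhs != rhs);  // NaN is the only x != x.
  if (*bailed_out) return 0;     // Caller must ignore the result and bail.
  if (lhs == rhs) return EQUAL;  // &fpu_eq
  return (lhs < rhs) ? LESS : GREATER;  // &fpu_lt / fall-through
}

}  // namespace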


void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::INTERNALIZED_STRING);
  Label miss;

  // Registers containing left and right operands respectively.
  Register left = a1;
  Register right = a0;
  Register tmp1 = a2;
  Register tmp2 = a3;

  // Check that both operands are heap objects.
  __ JumpIfEitherSmi(left, right, &miss);

  // Check that both operands are internalized strings.
  __ ld(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
  __ ld(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
  __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
  __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ Or(tmp1, tmp1, Operand(tmp2));
  __ And(at, tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask));
  __ Branch(&miss, ne, at, Operand(zero_reg));

  // Make sure a0 is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(a0));
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ mov(v0, right);
  // Internalized strings are compared by identity.
  __ Ret(ne, left, Operand(right));
  DCHECK(is_int16(EQUAL));
  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(Smi::FromInt(EQUAL)));

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::UNIQUE_NAME);
  DCHECK(GetCondition() == eq);
  Label miss;

  // Registers containing left and right operands respectively.
  Register left = a1;
  Register right = a0;
  Register tmp1 = a2;
  Register tmp2 = a3;

  // Check that both operands are heap objects.
  __ JumpIfEitherSmi(left, right, &miss);

  // Check that both operands are unique names. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ ld(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
  __ ld(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
  __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
  __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));

  __ JumpIfNotUniqueNameInstanceType(tmp1, &miss);
  __ JumpIfNotUniqueNameInstanceType(tmp2, &miss);

  // Use a0 as result.
  __ mov(v0, a0);

  // Unique names are compared by identity.
  Label done;
  __ Branch(&done, ne, left, Operand(right));
  // Make sure a0 is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(a0));
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ li(v0, Operand(Smi::FromInt(EQUAL)));
  __ bind(&done);
  __ Ret();

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::STRING);
  Label miss;

  bool equality = Token::IsEqualityOp(op());

  // Registers containing left and right operands respectively.
  Register left = a1;
  Register right = a0;
  Register tmp1 = a2;
  Register tmp2 = a3;
  Register tmp3 = a4;
  Register tmp4 = a5;
  Register tmp5 = a6;

  // Check that both operands are heap objects.
  __ JumpIfEitherSmi(left, right, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ ld(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
  __ ld(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
  __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
  __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ Or(tmp3, tmp1, tmp2);
  __ And(tmp5, tmp3, Operand(kIsNotStringMask));
  __ Branch(&miss, ne, tmp5, Operand(zero_reg));

  // Fast check for identical strings.
  Label left_ne_right;
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Branch(&left_ne_right, ne, left, Operand(right));
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, zero_reg);  // In the delay slot.
  __ bind(&left_ne_right);

  // Handle not identical strings.

  // Check that both strings are internalized strings. If they are, we're done
  // because we already know they are not identical. We know they are both
  // strings.
  if (equality) {
    DCHECK(GetCondition() == eq);
    STATIC_ASSERT(kInternalizedTag == 0);
    __ Or(tmp3, tmp1, Operand(tmp2));
    __ And(tmp5, tmp3, Operand(kIsNotInternalizedMask));
    Label is_symbol;
    __ Branch(&is_symbol, ne, tmp5, Operand(zero_reg));
    // Make sure a0 is non-zero. At this point input operands are
    // guaranteed to be non-zero.
    DCHECK(right.is(a0));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);  // In the delay slot.
    __ bind(&is_symbol);
  }

  // Check that both strings are sequential one-byte.
  Label runtime;
  __ JumpIfBothInstanceTypesAreNotSequentialOneByte(tmp1, tmp2, tmp3, tmp4,
                                                    &runtime);

  // Compare flat one-byte strings. Returns when done.
  if (equality) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1, tmp2,
                                                  tmp3);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
                                                    tmp2, tmp3, tmp4);
  }

  // Handle more complex cases in the runtime.
  __ bind(&runtime);
  if (equality) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(left, right);
      __ CallRuntime(Runtime::kStringEqual);
    }
    __ LoadRoot(a0, Heap::kTrueValueRootIndex);
    __ Ret(USE_DELAY_SLOT);
    __ Subu(v0, v0, a0);  // In the delay slot.
  } else {
    __ Push(left, right);
    __ TailCallRuntime(Runtime::kStringCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
  DCHECK_EQ(CompareICState::RECEIVER, state());
  Label miss;
  __ And(a2, a1, Operand(a0));
  __ JumpIfSmi(a2, &miss);

  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  __ GetObjectType(a0, a2, a2);
  __ Branch(&miss, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE));
  __ GetObjectType(a1, a2, a2);
  __ Branch(&miss, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE));

  DCHECK_EQ(eq, GetCondition());
  __ Ret(USE_DELAY_SLOT);
  __ dsubu(v0, a0, a1);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
  Label miss;
  Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
  __ And(a2, a1, a0);
  __ JumpIfSmi(a2, &miss);
  __ GetWeakValue(a4, cell);
  __ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ ld(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&miss, ne, a2, Operand(a4));
  __ Branch(&miss, ne, a3, Operand(a4));

  if (Token::IsEqualityOp(op())) {
    __ Ret(USE_DELAY_SLOT);
    __ dsubu(v0, a0, a1);
  } else {
    if (op() == Token::LT || op() == Token::LTE) {
      __ li(a2, Operand(Smi::FromInt(GREATER)));
    } else {
      __ li(a2, Operand(Smi::FromInt(LESS)));
    }
    __ Push(a1, a0, a2);
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateMiss(MacroAssembler* masm) {
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a0);
    __ Push(ra, a1, a0);
    __ li(a4, Operand(Smi::FromInt(op())));
    __ daddiu(sp, sp, -kPointerSize);
    __ CallRuntime(Runtime::kCompareIC_Miss, 3, kDontSaveFPRegs,
                   USE_DELAY_SLOT);
    __ sd(a4, MemOperand(sp));  // In the delay slot.
    // Compute the entry point of the rewritten stub.
    __ Daddu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
    // Restore registers.
    __ Pop(a1, a0, ra);
  }
  __ Jump(a2);
}


void DirectCEntryStub::Generate(MacroAssembler* masm) {
  // Make room for arguments to fit the C calling convention. Most callers of
  // DirectCEntryStub::GenerateCall use EnterExitFrame/LeaveExitFrame, so they
  // handle stack restoring and we don't have to do that here.
  // Any caller of DirectCEntryStub::GenerateCall must take care of dropping
  // kCArgsSlotsSize stack space after the call.
  __ daddiu(sp, sp, -kCArgsSlotsSize);
  // Place the return address on the stack, making the call
  // GC safe. The RegExp backend also relies on this.
  __ sd(ra, MemOperand(sp, kCArgsSlotsSize));
  __ Call(t9);  // Call the C++ function.
  __ ld(t9, MemOperand(sp, kCArgsSlotsSize));

  if (FLAG_debug_code && FLAG_enable_slow_asserts) {
    // In case of an error the return address may point to a memory area
    // filled with kZapValue by the GC.
    // Dereference the address and check for this.
    __ Uld(a4, MemOperand(t9));
    __ Assert(ne, kReceivedInvalidReturnAddress, a4,
        Operand(reinterpret_cast<uint64_t>(kZapValue)));
  }
  __ Jump(t9);
}


void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
                                    Register target) {
  intptr_t loc =
      reinterpret_cast<intptr_t>(GetCode().location());
  __ Move(t9, target);
  __ li(at, Operand(loc, RelocInfo::CODE_TARGET), CONSTANT_SIZE);
  __ Call(at);
}


void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register receiver,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register scratch0) {
  DCHECK(name->IsUniqueName());
  // If the names of slots 1 to kProbes - 1 in the probe sequence for the hash
  // value are not equal to the name, and the kProbes-th slot is unused (its
  // name is the undefined value), the hash table is guaranteed not to contain
  // the property. This holds even if some slots represent deleted properties
  // (their names are the hole value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // scratch0 points to the properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = scratch0;
    // Capacity is a smi 2^n.
    __ SmiLoadUntag(index, FieldMemOperand(properties, kCapacityOffset));
    __ Dsubu(index, index, Operand(1));
    __ And(index, index,
           Operand(name->Hash() + NameDictionary::GetProbeOffset(i)));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ Dlsa(index, index, index, 1);  // index *= 3.

    Register entity_name = scratch0;
    // Having undefined at this place means the name is not contained.
    STATIC_ASSERT(kSmiTagSize == 1);
    Register tmp = properties;

    __ Dlsa(tmp, properties, index, kPointerSizeLog2);
    __ ld(entity_name, FieldMemOperand(tmp, kElementsStartOffset));

    DCHECK(!tmp.is(entity_name));
    __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
    __ Branch(done, eq, entity_name, Operand(tmp));

    // Load the hole ready for use below:
    __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);

    // Stop if we found the property.
    __ Branch(miss, eq, entity_name, Operand(Handle<Name>(name)));

    Label good;
    __ Branch(&good, eq, entity_name, Operand(tmp));

    // Check if the entry name is not a unique name.
    __ ld(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
    __ lbu(entity_name,
           FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
    __ JumpIfNotUniqueNameInstanceType(entity_name, miss);
    __ bind(&good);

    // Restore the properties.
    __ ld(properties,
          FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  }

  const int spill_mask =
      (ra.bit() | a6.bit() | a5.bit() | a4.bit() | a3.bit() |
       a2.bit() | a1.bit() | a0.bit() | v0.bit());

  __ MultiPush(spill_mask);
  __ ld(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  __ li(a1, Operand(Handle<Name>(name)));
  NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP);
  __ CallStub(&stub);
  __ mov(at, v0);
  __ MultiPop(spill_mask);

  __ Branch(done, eq, at, Operand(zero_reg));
  __ Branch(miss, ne, at, Operand(zero_reg));
}
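

// Editor's note: an illustrative sketch of the probe sequence used above and
// in the stub below: slot = (hash + i + i*i) & mask over a power-of-two
// table. The sentinels and the hash-equality stand-in for the name-identity
// check are hypothetical simplifications.
namespace {

enum ProbeSlotState { kSlotFree, kSlotDeleted, kSlotOccupied };

// Returns false only when a free (undefined) slot proves the name absent.
bool MayContainNameSketch(const ProbeSlotState* slots,
                          const uint32_t* slot_hashes, uint32_t capacity_mask,
                          uint32_t hash, int probes) {
  for (int i = 0; i < probes; i++) {
    uint32_t index = (hash + i + i * i) & capacity_mask;
    if (slots[index] == kSlotFree) return false;  // Definitely absent.
    if (slots[index] == kSlotOccupied && slot_hashes[index] == hash) {
      return true;  // Found: a negative lookup must miss.
    }
    // A deleted slot (the hole) proves nothing; keep probing.
  }
  return true;  // Inconclusive: fall back to the out-of-line lookup.
}

}  // namespace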


// Probe the name dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found. Jump to
// the |miss| label otherwise.
// If the lookup was successful, |scratch2| points at the matching entry.
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register scratch1,
                                                      Register scratch2) {
  DCHECK(!elements.is(scratch1));
  DCHECK(!elements.is(scratch2));
  DCHECK(!name.is(scratch1));
  DCHECK(!name.is(scratch2));

  __ AssertName(name);

  // Compute the capacity mask.
  __ ld(scratch1, FieldMemOperand(elements, kCapacityOffset));
  __ SmiUntag(scratch1);
  __ Dsubu(scratch1, scratch1, Operand(1));

  // Generate an unrolled loop that performs a few probes before
  // giving up. Measurements done on Gmail indicate that 2 probes
  // cover ~93% of loads from dictionaries.
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ lwu(scratch2, FieldMemOperand(name, Name::kHashFieldOffset));
    if (i > 0) {
      // Add the probe offset (i + i * i) left shifted to avoid right shifting
      // the hash in a separate instruction. The value hash + i + i * i is
      // right shifted in the following and instruction.
      DCHECK(NameDictionary::GetProbeOffset(i) <
             1 << (32 - Name::kHashFieldOffset));
      __ Daddu(scratch2, scratch2, Operand(
          NameDictionary::GetProbeOffset(i) << Name::kHashShift));
    }
    __ dsrl(scratch2, scratch2, Name::kHashShift);
    __ And(scratch2, scratch1, scratch2);

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    // scratch2 = scratch2 * 3.
    __ Dlsa(scratch2, scratch2, scratch2, 1);

    // Check if the key is identical to the name.
    __ Dlsa(scratch2, elements, scratch2, kPointerSizeLog2);
    __ ld(at, FieldMemOperand(scratch2, kElementsStartOffset));
    __ Branch(done, eq, name, Operand(at));
  }

  const int spill_mask =
      (ra.bit() | a6.bit() | a5.bit() | a4.bit() |
       a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) &
      ~(scratch1.bit() | scratch2.bit());

  __ MultiPush(spill_mask);
  if (name.is(a0)) {
    DCHECK(!elements.is(a1));
    __ Move(a1, name);
    __ Move(a0, elements);
  } else {
    __ Move(a0, elements);
    __ Move(a1, name);
  }
  NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP);
  __ CallStub(&stub);
  __ mov(scratch2, a2);
  __ mov(at, v0);
  __ MultiPop(spill_mask);

  __ Branch(done, ne, at, Operand(zero_reg));
  __ Branch(miss, eq, at, Operand(zero_reg));
}


void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false. That means
  // we cannot call anything that could cause a GC from this stub.
  // Registers:
  //  result: NameDictionary to probe
  //  a1: key
  //  dictionary: NameDictionary to probe.
  //  index: will hold an index of entry if lookup is successful.
  //         might alias with result_.
  // Returns:
  //  result_ is zero if the lookup failed, non-zero otherwise.

  Register result = v0;
  Register dictionary = a0;
  Register key = a1;
  Register index = a2;
  Register mask = a3;
  Register hash = a4;
  Register undefined = a5;
  Register entry_key = a6;

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  __ ld(mask, FieldMemOperand(dictionary, kCapacityOffset));
  __ SmiUntag(mask);
  __ Dsubu(mask, mask, Operand(1));

  __ lwu(hash, FieldMemOperand(key, Name::kHashFieldOffset));

  __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);

  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    // Capacity is a smi 2^n.
    if (i > 0) {
      // Add the probe offset (i + i * i) left shifted to avoid right shifting
      // the hash in a separate instruction. The value hash + i + i * i is
      // right shifted in the following and instruction.
      DCHECK(NameDictionary::GetProbeOffset(i) <
             1 << (32 - Name::kHashFieldOffset));
      __ Daddu(index, hash, Operand(
          NameDictionary::GetProbeOffset(i) << Name::kHashShift));
    } else {
      __ mov(index, hash);
    }
    __ dsrl(index, index, Name::kHashShift);
    __ And(index, mask, index);

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    // index *= 3.
    __ Dlsa(index, index, index, 1);

    STATIC_ASSERT(kSmiTagSize == 1);
    __ Dlsa(index, dictionary, index, kPointerSizeLog2);
    __ ld(entry_key, FieldMemOperand(index, kElementsStartOffset));

    // Having undefined at this place means the name is not contained.
    __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined));

    // Stop if we found the property.
    __ Branch(&in_dictionary, eq, entry_key, Operand(key));

    if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
      // Check if the entry name is not a unique name.
      __ ld(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset));
      __ lbu(entry_key,
             FieldMemOperand(entry_key, Map::kInstanceTypeOffset));
      __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing a negative lookup, probing failure should be
  // treated as a lookup success. For a positive lookup, probing failure
  // should be treated as lookup failure.
  if (mode() == POSITIVE_LOOKUP) {
    __ Ret(USE_DELAY_SLOT);
    __ mov(result, zero_reg);
  }

  __ bind(&in_dictionary);
  __ Ret(USE_DELAY_SLOT);
  __ li(result, 1);

  __ bind(&not_in_dictionary);
  __ Ret(USE_DELAY_SLOT);
  __ mov(result, zero_reg);
}


void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
  stub1.GetCode();
  // Hydrogen code stubs need stub2 at snapshot time.
  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
  stub2.GetCode();
}


// Takes the input in 3 registers: address_, value_ and object_. A pointer to
// the value has just been written into the object; this stub now makes sure
// we keep the GC informed. The word in the object where the value has been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two branch+nop instructions are generated with labels so as to
  // get the offset fixed up correctly by the bind(Label*) call. We patch it
  // back and forth between a "bne zero_reg, zero_reg, ..." (a nop in this
  // position) and the "beq zero_reg, zero_reg, ..." when we start and stop
  // incremental heap marking.
  // See RecordWriteStub::Patch for details.
  __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting);
  __ nop();
  __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting);
  __ nop();

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object(),
                           address(),
                           value(),
                           save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  }
  __ Ret();

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.

  PatchBranchIntoNop(masm, 0);
  PatchBranchIntoNop(masm, 2 * Assembler::kInstrSize);
}


void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ ld(regs_.scratch0(), MemOperand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),  // Value.
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
                        &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    __ RememberedSetHelper(object(),
                           address(),
                           value(),
                           save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ Ret();
}


void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
  int argument_count = 3;
  __ PrepareCallCFunction(argument_count, regs_.scratch0());
  Register address =
      a0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
  DCHECK(!address.is(regs_.object()));
  DCHECK(!address.is(a0));
  __ Move(address, regs_.address());
  __ Move(a0, regs_.object());
  __ Move(a1, address);
  __ li(a2, Operand(ExternalReference::isolate_address(isolate())));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(isolate()),
      argument_count);
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
}


void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label on_black;
  Label need_incremental;
  Label need_incremental_pop_scratch;

  __ And(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask));
  __ ld(regs_.scratch1(),
        MemOperand(regs_.scratch0(),
                   MemoryChunk::kWriteBarrierCounterOffset));
  __ Dsubu(regs_.scratch1(), regs_.scratch1(), Operand(1));
  __ sd(regs_.scratch1(),
        MemOperand(regs_.scratch0(),
                   MemoryChunk::kWriteBarrierCounterOffset));
  __ Branch(&need_incremental, lt, regs_.scratch1(), Operand(zero_reg));

  // Let's look at the color of the object: if it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(),
                           address(),
                           value(),
                           save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ Ret();
  }

  __ bind(&on_black);

  // Get the value from the slot.
  __ ld(regs_.scratch0(), MemOperand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     eq,
                     &ensure_not_white);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     eq,
                     &need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need extra registers for this, so we push the object and the address
  // register temporarily.
  __ Push(regs_.object(), regs_.address());
  __ JumpIfWhite(regs_.scratch0(),  // The value.
                 regs_.scratch1(),  // Scratch.
                 regs_.object(),    // Scratch.
                 regs_.address(),   // Scratch.
                 &need_incremental_pop_scratch);
  __ Pop(regs_.object(), regs_.address());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(),
                           address(),
                           value(),
                           save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ Ret();
  }

  __ bind(&need_incremental_pop_scratch);
  __ Pop(regs_.object(), regs_.address());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}
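

// Editor's note: an illustrative sketch of the per-page counter filter at
// the top of the function above. The struct is hypothetical; the real field
// lives at MemoryChunk::kWriteBarrierCounterOffset.
namespace {

struct PageSketch { intptr_t write_barrier_counter; };

bool MustInformMarkerSketch(PageSketch* page, bool object_is_black) {
  // Mirrors the load/decrement/store plus branch-if-negative sequence:
  // once the countdown underflows, skip the color filter entirely.
  if (--page->write_barrier_counter < 0) return true;  // &need_incremental
  return object_is_black;  // Only black objects can require marker work.
}

}  // namespace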


void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(isolate(), 1, kSaveFPRegs);
  __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
  __ ld(a1, MemOperand(fp, parameter_count_offset));
  if (function_mode() == JS_FUNCTION_STUB_MODE) {
    __ Daddu(a1, a1, Operand(1));
  }
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ dsll(a1, a1, kPointerSizeLog2);
  __ Ret(USE_DELAY_SLOT);
  __ Daddu(sp, sp, a1);
}


void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  LoadICStub stub(isolate());
  stub.GenerateForTrampoline(masm);
}


void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  KeyedLoadICStub stub(isolate());
  stub.GenerateForTrampoline(masm);
}


void CallICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(a2);
  CallICStub stub(isolate(), state());
  __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }


void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


static void HandleArrayCases(MacroAssembler* masm, Register feedback,
                             Register receiver_map, Register scratch1,
                             Register scratch2, bool is_polymorphic,
                             Label* miss) {
  // feedback initially contains the feedback array.
  Label next_loop, prepare_next;
  Label start_polymorphic;

  Register cached_map = scratch1;

  __ ld(cached_map,
        FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(0)));
  __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
  __ Branch(&start_polymorphic, ne, receiver_map, Operand(cached_map));
  // Found; now call the handler.
  Register handler = feedback;
  __ ld(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1)));
  __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(t9);

  Register length = scratch2;
  __ bind(&start_polymorphic);
  __ ld(length, FieldMemOperand(feedback, FixedArray::kLengthOffset));
  if (!is_polymorphic) {
    // If the IC could be monomorphic we have to make sure we don't go past
    // the end of the feedback array.
    __ Branch(miss, eq, length, Operand(Smi::FromInt(2)));
  }

  Register too_far = length;
  Register pointer_reg = feedback;

  // +-----+------+------+-----+-----+ ... ----+
  // | map | len  | wm0  | h0  | wm1 |      hN |
  // +-----+------+------+-----+-----+ ... ----+
  //    0     1      2                    len-1
  //                   ^                    ^
  //                   |                    |
  //              pointer_reg            too_far
  //              aka feedback           scratch2
  // also need receiver_map
  // use cached_map (scratch1) to look in the weak map values.
  __ SmiScale(too_far, length, kPointerSizeLog2);
  __ Daddu(too_far, feedback, Operand(too_far));
  __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ Daddu(pointer_reg, feedback,
           Operand(FixedArray::OffsetOfElementAt(2) - kHeapObjectTag));

  __ bind(&next_loop);
  __ ld(cached_map, MemOperand(pointer_reg));
  __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
  __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map));
  __ ld(handler, MemOperand(pointer_reg, kPointerSize));
  __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(t9);

  __ bind(&prepare_next);
  __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 2));
  __ Branch(&next_loop, lt, pointer_reg, Operand(too_far));

  // We exhausted our array of map handler pairs.
  __ Branch(miss);
}
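

// Editor's note: an illustrative sketch, not V8 API. The loop above is a
// linear scan over (weak map, handler) pairs stored after the first two
// feedback-array elements; the struct below is hypothetical.
namespace {

struct MapHandlerPairSketch {
  const void* map;  // Weak cell value; a cleared cell never matches.
  const void* handler;
};

const void* FindHandlerSketch(const MapHandlerPairSketch* pairs, int count,
                              const void* receiver_map) {
  for (int i = 0; i < count; i++) {
    if (pairs[i].map == receiver_map) return pairs[i].handler;  // Jump via t9.
  }
  return NULL;  // Exhausted the array: miss.
}

}  // namespace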


static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
                                  Register receiver_map, Register feedback,
                                  Register vector, Register slot,
                                  Register scratch, Label* compare_map,
                                  Label* load_smi_map, Label* try_array) {
  __ JumpIfSmi(receiver, load_smi_map);
  __ ld(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ bind(compare_map);
  Register cached_map = scratch;
  // Move the weak map into the weak_cell register.
  __ ld(cached_map, FieldMemOperand(feedback, WeakCell::kValueOffset));
  __ Branch(try_array, ne, cached_map, Operand(receiver_map));
  Register handler = feedback;
  __ SmiScale(handler, slot, kPointerSizeLog2);
  __ Daddu(handler, vector, Operand(handler));
  __ ld(handler,
        FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize));
  __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag);
  __ Jump(t9);
}


void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // a1
  Register name = LoadWithVectorDescriptor::NameRegister();          // a2
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // a3
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // a0
  Register feedback = a4;
  Register receiver_map = a5;
  Register scratch1 = a6;

  __ SmiScale(feedback, slot, kPointerSizeLog2);
  __ Daddu(feedback, vector, Operand(feedback));
  __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
                        scratch1, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
  __ Branch(&not_array, ne, scratch1, Operand(at));
  HandleArrayCases(masm, feedback, receiver_map, scratch1, a7, true, &miss);

  __ bind(&not_array);
  __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
  __ Branch(&miss, ne, feedback, Operand(at));
  Code::Flags code_flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags,
                                               receiver, name, feedback,
                                               receiver_map, scratch1, a7);

  __ bind(&miss);
  LoadIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ Branch(&compare_map);
}


void KeyedLoadICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // a1
  Register key = LoadWithVectorDescriptor::NameRegister();           // a2
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // a3
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // a0
  Register feedback = a4;
  Register receiver_map = a5;
  Register scratch1 = a6;

  __ SmiScale(feedback, slot, kPointerSizeLog2);
  __ Daddu(feedback, vector, Operand(feedback));
  __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
                        scratch1, &compare_map, &load_smi_map, &try_array);

  __ bind(&try_array);
  // Is it a fixed array?
  __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
  __ Branch(&not_array, ne, scratch1, Operand(at));
  // We have a polymorphic element handler.
  __ JumpIfNotSmi(key, &miss);

  Label polymorphic, try_poly_name;
  __ bind(&polymorphic);
  HandleArrayCases(masm, feedback, receiver_map, scratch1, a7, true, &miss);

  __ bind(&not_array);
  // Is it generic?
  __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
  __ Branch(&try_poly_name, ne, feedback, Operand(at));
  Handle<Code> megamorphic_stub =
      KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ Branch(&miss, ne, key, Operand(feedback));
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ SmiScale(feedback, slot, kPointerSizeLog2);
  __ Daddu(feedback, vector, Operand(feedback));
  __ ld(feedback,
        FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize));
  HandleArrayCases(masm, feedback, receiver_map, scratch1, a7, false, &miss);

  __ bind(&miss);
  KeyedLoadIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ Branch(&compare_map);
}


void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
  VectorStoreICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
  VectorKeyedStoreICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


void VectorStoreICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // a1
  Register key = VectorStoreICDescriptor::NameRegister();           // a2
  Register vector = VectorStoreICDescriptor::VectorRegister();      // a3
  Register slot = VectorStoreICDescriptor::SlotRegister();          // a4
  DCHECK(VectorStoreICDescriptor::ValueRegister().is(a0));          // a0
  Register feedback = a5;
  Register receiver_map = a6;
  Register scratch1 = a7;

  __ SmiScale(scratch1, slot, kPointerSizeLog2);
  __ Daddu(feedback, vector, Operand(scratch1));
  __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
                        scratch1, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
  __ Branch(&not_array, ne, scratch1, Heap::kFixedArrayMapRootIndex);

  Register scratch2 = t0;
  HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, true,
                   &miss);

  __ bind(&not_array);
  __ Branch(&miss, ne, feedback, Heap::kmegamorphic_symbolRootIndex);
  Code::Flags code_flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, Code::STORE_IC, code_flags, receiver, key, feedback, receiver_map,
      scratch1, scratch2);

  __ bind(&miss);
  StoreIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ Branch(USE_DELAY_SLOT, &compare_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);  // In delay slot.
}


void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback,
                                       Register receiver_map, Register scratch1,
                                       Register scratch2, Label* miss) {
  // feedback initially contains the feedback array.
  Label next_loop, prepare_next;
  Label start_polymorphic;
  Label transition_call;

  Register cached_map = scratch1;
  Register too_far = scratch2;
  Register pointer_reg = feedback;

  __ ld(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset));

  // +-----+------+------+-----+-----+-----+ ... ----+
  // | map | len  | wm0  | wt0 | h0  | wm1 |      hN |
  // +-----+------+------+-----+-----+-----+ ... ----+
  //    0     1      2                          len-1
  //                   ^                           ^
  //                   |                           |
  //              pointer_reg                   too_far
  //              aka feedback                  scratch2
  // also need receiver_map
  // use cached_map (scratch1) to look in the weak map values.
  __ SmiScale(too_far, too_far, kPointerSizeLog2);
  __ Daddu(too_far, feedback, Operand(too_far));
  __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ Daddu(pointer_reg, feedback,
           Operand(FixedArray::OffsetOfElementAt(0) - kHeapObjectTag));

  __ bind(&next_loop);
  __ ld(cached_map, MemOperand(pointer_reg));
  __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
  __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map));
  // Is it a transitioning store?
  __ ld(too_far, MemOperand(pointer_reg, kPointerSize));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&transition_call, ne, too_far, Operand(at));

  __ ld(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2));
  __ Daddu(t9, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(t9);

  __ bind(&transition_call);
  __ ld(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset));
  __ JumpIfSmi(too_far, miss);

  __ ld(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));
  // Load the map into the correct register.
  DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister()));
  __ Move(feedback, too_far);
  __ Daddu(t9, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(t9);

  __ bind(&prepare_next);
  __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 3));
  __ Branch(&next_loop, lt, pointer_reg, Operand(too_far));

  // We exhausted our array of map handler pairs.
  __ Branch(miss);
}
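

// Editor's note: an illustrative sketch, not V8 API. The store variant above
// walks (map, transition, handler) triples; a non-undefined transition cell
// redirects the store to the new map before jumping to the handler. The
// struct and NULL-for-undefined convention are hypothetical.
namespace {

struct StoreEntrySketch {
  const void* map;
  const void* transition;  // NULL stands in for undefined here.
  const void* handler;
};

const void* FindStoreHandlerSketch(const StoreEntrySketch* entries, int count,
                                   const void* receiver_map,
                                   const void** map_out) {
  for (int i = 0; i < count; i++) {
    if (entries[i].map != receiver_map) continue;
    if (entries[i].transition != NULL) {
      *map_out = entries[i].transition;  // &transition_call
    }
    return entries[i].handler;
  }
  return NULL;  // Miss.
}

}  // namespace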


void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm,
                                          bool in_frame) {
  Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // a1
  Register key = VectorStoreICDescriptor::NameRegister();           // a2
  Register vector = VectorStoreICDescriptor::VectorRegister();      // a3
  Register slot = VectorStoreICDescriptor::SlotRegister();          // a4
  DCHECK(VectorStoreICDescriptor::ValueRegister().is(a0));          // a0
  Register feedback = a5;
  Register receiver_map = a6;
  Register scratch1 = a7;

  __ SmiScale(scratch1, slot, kPointerSizeLog2);
  __ Daddu(feedback, vector, Operand(scratch1));
  __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
                        scratch1, &compare_map, &load_smi_map, &try_array);

  __ bind(&try_array);
  // Is it a fixed array?
  __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
  __ Branch(&not_array, ne, scratch1, Heap::kFixedArrayMapRootIndex);

  // We have a polymorphic element handler.
  Label try_poly_name;

  Register scratch2 = t0;

  HandlePolymorphicStoreCase(masm, feedback, receiver_map, scratch1, scratch2,
                             &miss);

  __ bind(&not_array);
  // Is it generic?
  __ Branch(&try_poly_name, ne, feedback, Heap::kmegamorphic_symbolRootIndex);
  Handle<Code> megamorphic_stub =
      KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ Branch(&miss, ne, key, Operand(feedback));
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ SmiScale(scratch1, slot, kPointerSizeLog2);
  __ Daddu(feedback, vector, Operand(scratch1));
  __ ld(feedback,
        FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize));
  HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, false,
                   &miss);

  __ bind(&miss);
  KeyedStoreIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ Branch(USE_DELAY_SLOT, &compare_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);  // In delay slot.
}
4118
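// Illustrative sketch, not part of the stub: the SmiScale/Daddu pair above
// turns a tagged slot index into a byte offset into the feedback vector.
// With the 32-bit smi layout assumed here (payload in the upper word), one
// arithmetic right shift both untags and scales. Constants are assumptions.
namespace feedback_slot_sketch {
inline long SlotByteOffset(long tagged_slot) {
  const int kSmiShift = 32;          // smi payload lives in the upper word
  const int kPointerSizeLog2 = 3;    // 8-byte pointers
  const int kFixedArrayHeader = 16;  // map word + length word
  // (value << 32) >> (32 - 3) == value * 8, which is what SmiScale computes.
  long scaled = tagged_slot >> (kSmiShift - kPointerSizeLog2);
  return scaled + kFixedArrayHeader;  // FieldMemOperand drops the heap tag
}
}  // namespace feedback_slot_sketch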
4119
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004120void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
4121 if (masm->isolate()->function_entry_hook() != NULL) {
4122 ProfileEntryHookStub stub(masm->isolate());
4123 __ push(ra);
4124 __ CallStub(&stub);
4125 __ pop(ra);
4126 }
4127}
4128
4129
4130void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
4131 // The entry hook is a "push ra" instruction, followed by a call.
4132 // Note: on MIPS "push" is 2 instructions.
4133 const int32_t kReturnAddressDistanceFromFunctionStart =
4134 Assembler::kCallTargetAddressOffset + (2 * Assembler::kInstrSize);
4135
4136 // This should contain all kJSCallerSaved registers.
4137 const RegList kSavedRegs =
4138 kJSCallerSaved | // Caller saved registers.
4139 s5.bit(); // Saved stack pointer.
4140
4141 // We also save ra, so the count here is one higher than the mask indicates.
4142 const int32_t kNumSavedRegs = kNumJSCallerSaved + 2;
4143
4144 // Save all caller-save registers as this may be called from anywhere.
4145 __ MultiPush(kSavedRegs | ra.bit());
4146
4147 // Compute the function's address for the first argument.
4148 __ Dsubu(a0, ra, Operand(kReturnAddressDistanceFromFunctionStart));
4149
4150 // The caller's return address is above the saved temporaries.
4151 // Grab that for the second argument to the hook.
4152 __ Daddu(a1, sp, Operand(kNumSavedRegs * kPointerSize));
4153
4154 // Align the stack if necessary.
4155 int frame_alignment = masm->ActivationFrameAlignment();
4156 if (frame_alignment > kPointerSize) {
4157 __ mov(s5, sp);
4158 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
4159 __ And(sp, sp, Operand(-frame_alignment));
4160 }
4161
4162 __ Dsubu(sp, sp, kCArgsSlotsSize);
4163#if defined(V8_HOST_ARCH_MIPS) || defined(V8_HOST_ARCH_MIPS64)
4164 int64_t entry_hook =
4165 reinterpret_cast<int64_t>(isolate()->function_entry_hook());
4166 __ li(t9, Operand(entry_hook));
4167#else
4168 // Under the simulator we need to indirect the entry hook through a
4169 // trampoline function at a known address.
4170 // It additionally takes an isolate as a third parameter.
4171 __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
4172
4173 ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
4174 __ li(t9, Operand(ExternalReference(&dispatcher,
4175 ExternalReference::BUILTIN_CALL,
4176 isolate())));
4177#endif
4178 // Call C function through t9 to conform ABI for PIC.
4179 __ Call(t9);
4180
4181 // Restore the stack pointer if needed.
4182 if (frame_alignment > kPointerSize) {
4183 __ mov(sp, s5);
4184 } else {
4185 __ Daddu(sp, sp, kCArgsSlotsSize);
4186 }
4187
4188 // Also pop ra to get Ret(0).
4189 __ MultiPop(kSavedRegs | ra.bit());
4190 __ Ret();
4191}
4192
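// Illustrative sketch, not part of the stub: the alignment code above saves
// sp in s5 and then rounds sp down to the activation frame alignment by
// AND-ing with -alignment, which works because the alignment is a power of
// two.
namespace stack_align_sketch {
inline unsigned long AlignStackDown(unsigned long sp, unsigned long align) {
  // align must be a power of two; ~(align - 1) clears the low bits, the same
  // mask the stub builds with Operand(-frame_alignment).
  return sp & ~(align - 1);
}
}  // namespace stack_align_sketch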
4193
4194template<class T>
4195static void CreateArrayDispatch(MacroAssembler* masm,
4196 AllocationSiteOverrideMode mode) {
4197 if (mode == DISABLE_ALLOCATION_SITES) {
4198 T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
4199 __ TailCallStub(&stub);
4200 } else if (mode == DONT_OVERRIDE) {
4201 int last_index = GetSequenceIndexFromFastElementsKind(
4202 TERMINAL_FAST_ELEMENTS_KIND);
4203 for (int i = 0; i <= last_index; ++i) {
4204 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4205 T stub(masm->isolate(), kind);
4206 __ TailCallStub(&stub, eq, a3, Operand(kind));
4207 }
4208
4209 // If we reached this point there is a problem.
4210 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4211 } else {
4212 UNREACHABLE();
4213 }
4214}
4215
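// Illustrative sketch, not part of the stub: the emitted chain of guarded
// tail-calls above behaves like this linear dispatch over the fast elements
// kind sequence; returning an index into a stub table is a hypothetical
// simplification.
namespace create_array_dispatch_sketch {
inline int SelectStubIndex(const int* kinds, int count, int requested_kind) {
  for (int i = 0; i < count; ++i) {
    if (kinds[i] == requested_kind) return i;  // __ TailCallStub(&stub_i)
  }
  return -1;  // __ Abort(kUnexpectedElementsKindInArrayConstructor)
}
}  // namespace create_array_dispatch_sketch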
4216
4217static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
4218 AllocationSiteOverrideMode mode) {
4219 // a2 - allocation site (if mode != DISABLE_ALLOCATION_SITES)
4220 // a3 - kind (if mode != DISABLE_ALLOCATION_SITES)
4221 // a0 - number of arguments
4222 // a1 - constructor?
4223 // sp[0] - last argument
4224 Label normal_sequence;
4225 if (mode == DONT_OVERRIDE) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004226 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
4227 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
4228 STATIC_ASSERT(FAST_ELEMENTS == 2);
4229 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
4230 STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
4231 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004232
4233 // is the low bit set? If so, we are holey and that is good.
4234 __ And(at, a3, Operand(1));
4235 __ Branch(&normal_sequence, ne, at, Operand(zero_reg));
4236 }
4237 // look at the first argument
4238 __ ld(a5, MemOperand(sp, 0));
4239 __ Branch(&normal_sequence, eq, a5, Operand(zero_reg));
4240
4241 if (mode == DISABLE_ALLOCATION_SITES) {
4242 ElementsKind initial = GetInitialFastElementsKind();
4243 ElementsKind holey_initial = GetHoleyElementsKind(initial);
4244
4245 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
4246 holey_initial,
4247 DISABLE_ALLOCATION_SITES);
4248 __ TailCallStub(&stub_holey);
4249
4250 __ bind(&normal_sequence);
4251 ArraySingleArgumentConstructorStub stub(masm->isolate(),
4252 initial,
4253 DISABLE_ALLOCATION_SITES);
4254 __ TailCallStub(&stub);
4255 } else if (mode == DONT_OVERRIDE) {
4256 // We are going to create a holey array, but our kind is non-holey.
4257 // Fix kind and retry (only if we have an allocation site in the slot).
4258 __ Daddu(a3, a3, Operand(1));
4259
4260 if (FLAG_debug_code) {
4261 __ ld(a5, FieldMemOperand(a2, 0));
4262 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
4263 __ Assert(eq, kExpectedAllocationSite, a5, Operand(at));
4264 }
4265
4266 // Save the resulting elements kind in type info. We can't just store a3
4267 // in the AllocationSite::transition_info field because elements kind is
4268 // restricted to a portion of the field...upper bits need to be left alone.
4269 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4270 __ ld(a4, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset));
4271 __ Daddu(a4, a4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
4272 __ sd(a4, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset));
4273
4274
4275 __ bind(&normal_sequence);
4276 int last_index = GetSequenceIndexFromFastElementsKind(
4277 TERMINAL_FAST_ELEMENTS_KIND);
4278 for (int i = 0; i <= last_index; ++i) {
4279 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4280 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
4281 __ TailCallStub(&stub, eq, a3, Operand(kind));
4282 }
4283
4284 // If we reached this point there is a problem.
4285 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4286 } else {
4287 UNREACHABLE();
4288 }
4289}
4290
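// Illustrative sketch, not part of the stub: the STATIC_ASSERTs above pin an
// encoding in which each holey kind is its packed kind plus one, so the low
// bit tests holeyness and an increment performs the packed-to-holey
// transition the stub applies to a3 and to the AllocationSite info.
namespace elements_kind_sketch {
inline bool IsHoleyKind(int kind) { return (kind & 1) != 0; }
inline int ToHoleyKind(int kind) {
  return IsHoleyKind(kind) ? kind : kind + 1;
}
}  // namespace elements_kind_sketch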
4291
4292template<class T>
4293static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4294 int to_index = GetSequenceIndexFromFastElementsKind(
4295 TERMINAL_FAST_ELEMENTS_KIND);
4296 for (int i = 0; i <= to_index; ++i) {
4297 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4298 T stub(isolate, kind);
4299 stub.GetCode();
4300 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
4301 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
4302 stub1.GetCode();
4303 }
4304 }
4305}
4306
Ben Murdoch61f157c2016-09-16 13:49:30 +01004307void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004308 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
4309 isolate);
4310 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
4311 isolate);
Ben Murdoch61f157c2016-09-16 13:49:30 +01004312 ArrayNArgumentsConstructorStub stub(isolate);
4313 stub.GetCode();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004314 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
4315 for (int i = 0; i < 2; i++) {
4316 // For internal arrays we only need a few things.
4317 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
4318 stubh1.GetCode();
4319 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
4320 stubh2.GetCode();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004321 }
4322}
4323
4324
4325void ArrayConstructorStub::GenerateDispatchToArrayStub(
4326 MacroAssembler* masm,
4327 AllocationSiteOverrideMode mode) {
4328 if (argument_count() == ANY) {
4329 Label not_zero_case, not_one_case;
4330 __ And(at, a0, a0);
4331 __ Branch(&not_zero_case, ne, at, Operand(zero_reg));
4332 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4333
4334 __ bind(&not_zero_case);
4335 __ Branch(&not_one_case, gt, a0, Operand(1));
4336 CreateArrayDispatchOneArgument(masm, mode);
4337
4338 __ bind(&not_one_case);
Ben Murdoch61f157c2016-09-16 13:49:30 +01004339 ArrayNArgumentsConstructorStub stub(masm->isolate());
4340 __ TailCallStub(&stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004341 } else if (argument_count() == NONE) {
4342 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4343 } else if (argument_count() == ONE) {
4344 CreateArrayDispatchOneArgument(masm, mode);
4345 } else if (argument_count() == MORE_THAN_ONE) {
Ben Murdoch61f157c2016-09-16 13:49:30 +01004346 ArrayNArgumentsConstructorStub stub(masm->isolate());
4347 __ TailCallStub(&stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004348 } else {
4349 UNREACHABLE();
4350 }
4351}
4352
4353
4354void ArrayConstructorStub::Generate(MacroAssembler* masm) {
4355 // ----------- S t a t e -------------
4356 // -- a0 : argc (only if argument_count() == ANY)
4357 // -- a1 : constructor
4358 // -- a2 : AllocationSite or undefined
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004359 // -- a3 : new target
4360 // -- sp[0] : last argument
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004361 // -----------------------------------
4362
4363 if (FLAG_debug_code) {
4364 // The array construct code is only set for the global and natives
4365 // builtin Array functions which always have maps.
4366
4367 // Initial map for the builtin Array function should be a map.
4368 __ ld(a4, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
4369 // The SmiTst check below catches both a NULL pointer and a Smi.
4370 __ SmiTst(a4, at);
4371 __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
4372 at, Operand(zero_reg));
4373 __ GetObjectType(a4, a4, a5);
4374 __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
4375 a5, Operand(MAP_TYPE));
4376
4377 // We should either have undefined in a2 or a valid AllocationSite
4378 __ AssertUndefinedOrAllocationSite(a2, a4);
4379 }
4380
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004381 // Enter the context of the Array function.
4382 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
4383
4384 Label subclassing;
4385 __ Branch(&subclassing, ne, a1, Operand(a3));
4386
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004387 Label no_info;
4388 // Get the elements kind and case on that.
4389 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4390 __ Branch(&no_info, eq, a2, Operand(at));
4391
4392 __ ld(a3, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset));
4393 __ SmiUntag(a3);
4394 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4395 __ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask));
4396 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
4397
4398 __ bind(&no_info);
4399 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004400
4401 // Subclassing.
4402 __ bind(&subclassing);
4403 switch (argument_count()) {
4404 case ANY:
4405 case MORE_THAN_ONE:
Ben Murdoch097c5b22016-05-18 11:27:45 +01004406 __ Dlsa(at, sp, a0, kPointerSizeLog2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004407 __ sd(a1, MemOperand(at));
4408 __ li(at, Operand(3));
4409 __ Daddu(a0, a0, at);
4410 break;
4411 case NONE:
4412 __ sd(a1, MemOperand(sp, 0 * kPointerSize));
4413 __ li(a0, Operand(3));
4414 break;
4415 case ONE:
4416 __ sd(a1, MemOperand(sp, 1 * kPointerSize));
4417 __ li(a0, Operand(4));
4418 break;
4419 }
4420 __ Push(a3, a2);
4421 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004422}
4423
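// Illustrative sketch, not part of the stub: before dispatching, the code
// above masks the AllocationSite transition info down to its elements-kind
// bits (shift 0 per the STATIC_ASSERT). The field width used here is an
// assumption for illustration.
namespace transition_info_sketch {
inline int DecodeElementsKind(unsigned transition_info) {
  const unsigned kElementsKindMask = 0x1F;  // ElementsKindBits, hypothetical
  return static_cast<int>(transition_info & kElementsKindMask);
}
}  // namespace transition_info_sketch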
4424
4425void InternalArrayConstructorStub::GenerateCase(
4426 MacroAssembler* masm, ElementsKind kind) {
4427
4428 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
4429 __ TailCallStub(&stub0, lo, a0, Operand(1));
4430
Ben Murdoch61f157c2016-09-16 13:49:30 +01004431 ArrayNArgumentsConstructorStub stubN(isolate());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004432 __ TailCallStub(&stubN, hi, a0, Operand(1));
4433
4434 if (IsFastPackedElementsKind(kind)) {
4435 // We might need to create a holey array;
4436 // look at the first argument.
4437 __ ld(at, MemOperand(sp, 0));
4438
4439 InternalArraySingleArgumentConstructorStub
4440 stub1_holey(isolate(), GetHoleyElementsKind(kind));
4441 __ TailCallStub(&stub1_holey, ne, at, Operand(zero_reg));
4442 }
4443
4444 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
4445 __ TailCallStub(&stub1);
4446}
4447
4448
4449void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
4450 // ----------- S t a t e -------------
4451 // -- a0 : argc
4452 // -- a1 : constructor
4453 // -- sp[0] : return address
4454 // -- sp[4] : last argument
4455 // -----------------------------------
4456
4457 if (FLAG_debug_code) {
4458 // The array construct code is only set for the global and natives
4459 // builtin Array functions which always have maps.
4460
4461 // Initial map for the builtin Array function should be a map.
4462 __ ld(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
4463 // The SmiTst check below catches both a NULL pointer and a Smi.
4464 __ SmiTst(a3, at);
4465 __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
4466 at, Operand(zero_reg));
4467 __ GetObjectType(a3, a3, a4);
4468 __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
4469 a4, Operand(MAP_TYPE));
4470 }
4471
4472 // Figure out the right elements kind.
4473 __ ld(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
4474
4475 // Load the map's "bit field 2" into a3. We only need the first byte,
4476 // but the following bit field extraction takes care of that anyway.
4477 __ lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset));
4478 // Retrieve elements_kind from bit field 2.
4479 __ DecodeField<Map::ElementsKindBits>(a3);
4480
4481 if (FLAG_debug_code) {
4482 Label done;
4483 __ Branch(&done, eq, a3, Operand(FAST_ELEMENTS));
4484 __ Assert(
4485 eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray,
4486 a3, Operand(FAST_HOLEY_ELEMENTS));
4487 __ bind(&done);
4488 }
4489
4490 Label fast_elements_case;
4491 __ Branch(&fast_elements_case, eq, a3, Operand(FAST_ELEMENTS));
4492 GenerateCase(masm, FAST_HOLEY_ELEMENTS);
4493
4494 __ bind(&fast_elements_case);
4495 GenerateCase(masm, FAST_ELEMENTS);
4496}
4497
4498
Ben Murdoch097c5b22016-05-18 11:27:45 +01004499void FastNewObjectStub::Generate(MacroAssembler* masm) {
4500 // ----------- S t a t e -------------
4501 // -- a1 : target
4502 // -- a3 : new target
4503 // -- cp : context
4504 // -- ra : return address
4505 // -----------------------------------
4506 __ AssertFunction(a1);
4507 __ AssertReceiver(a3);
4508
4509 // Verify that the new target is a JSFunction.
4510 Label new_object;
4511 __ GetObjectType(a3, a2, a2);
4512 __ Branch(&new_object, ne, a2, Operand(JS_FUNCTION_TYPE));
4513
4514 // Load the initial map and verify that it's in fact a map.
4515 __ ld(a2, FieldMemOperand(a3, JSFunction::kPrototypeOrInitialMapOffset));
4516 __ JumpIfSmi(a2, &new_object);
4517 __ GetObjectType(a2, a0, a0);
4518 __ Branch(&new_object, ne, a0, Operand(MAP_TYPE));
4519
4520 // Fall back to runtime if the target differs from the new target's
4521 // initial map constructor.
4522 __ ld(a0, FieldMemOperand(a2, Map::kConstructorOrBackPointerOffset));
4523 __ Branch(&new_object, ne, a0, Operand(a1));
4524
4525 // Allocate the JSObject on the heap.
4526 Label allocate, done_allocate;
4527 __ lbu(a4, FieldMemOperand(a2, Map::kInstanceSizeOffset));
4528 __ Allocate(a4, v0, a5, a0, &allocate, SIZE_IN_WORDS);
4529 __ bind(&done_allocate);
4530
4531 // Initialize the JSObject fields.
Ben Murdochc5610432016-08-08 18:44:38 +01004532 __ sd(a2, FieldMemOperand(v0, JSObject::kMapOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004533 __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
Ben Murdochc5610432016-08-08 18:44:38 +01004534 __ sd(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
4535 __ sd(a3, FieldMemOperand(v0, JSObject::kElementsOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004536 STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
Ben Murdochc5610432016-08-08 18:44:38 +01004537 __ Daddu(a1, v0, Operand(JSObject::kHeaderSize - kHeapObjectTag));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004538
4539 // ----------- S t a t e -------------
Ben Murdochc5610432016-08-08 18:44:38 +01004540 // -- v0 : result (tagged)
Ben Murdoch097c5b22016-05-18 11:27:45 +01004541 // -- a1 : result fields (untagged)
4542 // -- a5 : result end (untagged)
4543 // -- a2 : initial map
4544 // -- cp : context
4545 // -- ra : return address
4546 // -----------------------------------
4547
4548 // Perform in-object slack tracking if requested.
4549 Label slack_tracking;
4550 STATIC_ASSERT(Map::kNoSlackTracking == 0);
4551 __ lwu(a3, FieldMemOperand(a2, Map::kBitField3Offset));
4552 __ And(at, a3, Operand(Map::ConstructionCounter::kMask));
4553 __ Branch(USE_DELAY_SLOT, &slack_tracking, ne, at, Operand(zero_reg));
4554 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); // In delay slot.
4555 {
4556 // Initialize all in-object fields with undefined.
4557 __ InitializeFieldsWithFiller(a1, a5, a0);
Ben Murdochc5610432016-08-08 18:44:38 +01004558 __ Ret();
Ben Murdoch097c5b22016-05-18 11:27:45 +01004559 }
4560 __ bind(&slack_tracking);
4561 {
4562 // Decrease generous allocation count.
4563 STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
4564 __ Subu(a3, a3, Operand(1 << Map::ConstructionCounter::kShift));
4565 __ sw(a3, FieldMemOperand(a2, Map::kBitField3Offset));
4566
4567 // Initialize the in-object fields with undefined.
4568 __ lbu(a4, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
4569 __ dsll(a4, a4, kPointerSizeLog2);
4570 __ Dsubu(a4, a5, a4);
4571 __ InitializeFieldsWithFiller(a1, a4, a0);
4572
4573 // Initialize the remaining (reserved) fields with the one-pointer filler map.
4574 __ LoadRoot(a0, Heap::kOnePointerFillerMapRootIndex);
4575 __ InitializeFieldsWithFiller(a1, a5, a0);
4576
4577 // Check if we can finalize the instance size.
4578 Label finalize;
4579 STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
4580 __ And(a3, a3, Operand(Map::ConstructionCounter::kMask));
Ben Murdochc5610432016-08-08 18:44:38 +01004581 __ Branch(&finalize, eq, a3, Operand(zero_reg));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004582 __ Ret();
4583
4584 // Finalize the instance size.
4585 __ bind(&finalize);
4586 {
4587 FrameScope scope(masm, StackFrame::INTERNAL);
4588 __ Push(v0, a2);
4589 __ CallRuntime(Runtime::kFinalizeInstanceSize);
4590 __ Pop(v0);
4591 }
4592 __ Ret();
4593 }
4594
4595 // Fall back to %AllocateInNewSpace.
4596 __ bind(&allocate);
4597 {
4598 FrameScope scope(masm, StackFrame::INTERNAL);
4599 STATIC_ASSERT(kSmiTag == 0);
4600 STATIC_ASSERT(kSmiTagSize == 1);
4601 __ dsll(a4, a4, kPointerSizeLog2 + kSmiShiftSize + kSmiTagSize);
4602 __ SmiTag(a4);
4603 __ Push(a2, a4);
4604 __ CallRuntime(Runtime::kAllocateInNewSpace);
4605 __ Pop(a2);
4606 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01004607 __ lbu(a5, FieldMemOperand(a2, Map::kInstanceSizeOffset));
4608 __ Dlsa(a5, v0, a5, kPointerSizeLog2);
Ben Murdochc5610432016-08-08 18:44:38 +01004609 STATIC_ASSERT(kHeapObjectTag == 1);
4610 __ Dsubu(a5, a5, Operand(kHeapObjectTag));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004611 __ jmp(&done_allocate);
4612
4613 // Fall back to %NewObject.
4614 __ bind(&new_object);
4615 __ Push(a1, a3);
4616 __ TailCallRuntime(Runtime::kNewObject);
4617}
4618
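// Illustrative sketch, not part of the stub: while slack tracking is active,
// the code above fills the fields that are in use with undefined and the
// still-unused tail with the one-pointer filler, so the object stays valid
// while it can still shrink. Names are hypothetical.
namespace slack_tracking_sketch {
inline void InitializeFields(void** fields, int total_fields,
                             int unused_fields, void* undefined_value,
                             void* one_pointer_filler) {
  int used = total_fields - unused_fields;
  for (int i = 0; i < used; ++i) fields[i] = undefined_value;
  for (int i = used; i < total_fields; ++i) fields[i] = one_pointer_filler;
}
}  // namespace slack_tracking_sketch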
4619
4620void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
4621 // ----------- S t a t e -------------
4622 // -- a1 : function
4623 // -- cp : context
4624 // -- fp : frame pointer
4625 // -- ra : return address
4626 // -----------------------------------
4627 __ AssertFunction(a1);
4628
Ben Murdochc5610432016-08-08 18:44:38 +01004629 // Make a2 point to the JavaScript frame.
4630 __ mov(a2, fp);
4631 if (skip_stub_frame()) {
4632 // For Ignition we need to skip the handler/stub frame to reach the
4633 // JavaScript frame for the function.
Ben Murdoch097c5b22016-05-18 11:27:45 +01004634 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01004635 }
4636 if (FLAG_debug_code) {
4637 Label ok;
Ben Murdochda12d292016-06-02 14:46:10 +01004638 __ ld(a3, MemOperand(a2, StandardFrameConstants::kFunctionOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01004639 __ Branch(&ok, eq, a1, Operand(a3));
4640 __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
4641 __ bind(&ok);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004642 }
4643
4644 // Check if we have rest parameters (only possible if we have an
4645 // arguments adaptor frame below the function frame).
4646 Label no_rest_parameters;
4647 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01004648 __ ld(a3, MemOperand(a2, CommonFrameConstants::kContextOrFrameTypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004649 __ Branch(&no_rest_parameters, ne, a3,
4650 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4651
4652 // Check if the arguments adaptor frame contains more arguments than
4653 // specified by the function's internal formal parameter count.
4654 Label rest_parameters;
4655 __ SmiLoadUntag(
4656 a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
Ben Murdoch61f157c2016-09-16 13:49:30 +01004657 __ ld(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
4658 __ lw(a3,
4659 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
4660 __ Dsubu(a0, a0, Operand(a3));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004661 __ Branch(&rest_parameters, gt, a0, Operand(zero_reg));
4662
4663 // Return an empty rest parameter array.
4664 __ bind(&no_rest_parameters);
4665 {
4666 // ----------- S t a t e -------------
4667 // -- cp : context
4668 // -- ra : return address
4669 // -----------------------------------
4670
4671 // Allocate an empty rest parameter array.
4672 Label allocate, done_allocate;
Ben Murdochc5610432016-08-08 18:44:38 +01004673 __ Allocate(JSArray::kSize, v0, a0, a1, &allocate, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004674 __ bind(&done_allocate);
4675
4676 // Setup the rest parameter array in v0.
4677 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, a1);
4678 __ sd(a1, FieldMemOperand(v0, JSArray::kMapOffset));
4679 __ LoadRoot(a1, Heap::kEmptyFixedArrayRootIndex);
4680 __ sd(a1, FieldMemOperand(v0, JSArray::kPropertiesOffset));
4681 __ sd(a1, FieldMemOperand(v0, JSArray::kElementsOffset));
4682 __ Move(a1, Smi::FromInt(0));
4683 __ Ret(USE_DELAY_SLOT);
4684 __ sd(a1, FieldMemOperand(v0, JSArray::kLengthOffset)); // In delay slot
4685 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
4686
4687 // Fall back to %AllocateInNewSpace.
4688 __ bind(&allocate);
4689 {
4690 FrameScope scope(masm, StackFrame::INTERNAL);
4691 __ Push(Smi::FromInt(JSArray::kSize));
4692 __ CallRuntime(Runtime::kAllocateInNewSpace);
4693 }
4694 __ jmp(&done_allocate);
4695 }
4696
4697 __ bind(&rest_parameters);
4698 {
4699 // Compute the pointer to the first rest parameter (skipping the receiver).
4700 __ Dlsa(a2, a2, a0, kPointerSizeLog2);
4701 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
4702 1 * kPointerSize));
4703
4704 // ----------- S t a t e -------------
4705 // -- cp : context
4706 // -- a0 : number of rest parameters
Ben Murdoch61f157c2016-09-16 13:49:30 +01004707 // -- a1 : function
Ben Murdoch097c5b22016-05-18 11:27:45 +01004708 // -- a2 : pointer to first rest parameters
4709 // -- ra : return address
4710 // -----------------------------------
4711
4712 // Allocate space for the rest parameter array plus the backing store.
4713 Label allocate, done_allocate;
Ben Murdoch61f157c2016-09-16 13:49:30 +01004714 __ li(a5, Operand(JSArray::kSize + FixedArray::kHeaderSize));
4715 __ Dlsa(a5, a5, a0, kPointerSizeLog2);
4716 __ Allocate(a5, v0, a3, a4, &allocate, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004717 __ bind(&done_allocate);
4718
4719 // Compute arguments.length in a4.
4720 __ SmiTag(a4, a0);
4721
4722 // Setup the elements array in v0.
4723 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
4724 __ sd(at, FieldMemOperand(v0, FixedArray::kMapOffset));
4725 __ sd(a4, FieldMemOperand(v0, FixedArray::kLengthOffset));
4726 __ Daddu(a3, v0, Operand(FixedArray::kHeaderSize));
4727 {
4728 Label loop, done_loop;
4729 __ Dlsa(a1, a3, a0, kPointerSizeLog2);
4730 __ bind(&loop);
4731 __ Branch(&done_loop, eq, a1, Operand(a3));
4732 __ ld(at, MemOperand(a2, 0 * kPointerSize));
4733 __ sd(at, FieldMemOperand(a3, 0 * kPointerSize));
4734 __ Dsubu(a2, a2, Operand(1 * kPointerSize));
4735 __ Daddu(a3, a3, Operand(1 * kPointerSize));
4736 __ Branch(&loop);
4737 __ bind(&done_loop);
4738 }
4739
4740 // Setup the rest parameter array in a3.
4741 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, at);
4742 __ sd(at, FieldMemOperand(a3, JSArray::kMapOffset));
4743 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
4744 __ sd(at, FieldMemOperand(a3, JSArray::kPropertiesOffset));
4745 __ sd(v0, FieldMemOperand(a3, JSArray::kElementsOffset));
4746 __ sd(a4, FieldMemOperand(a3, JSArray::kLengthOffset));
4747 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
4748 __ Ret(USE_DELAY_SLOT);
4749 __ mov(v0, a3); // In delay slot
4750
Ben Murdoch61f157c2016-09-16 13:49:30 +01004751 // Fall back to %AllocateInNewSpace (if not too big).
4752 Label too_big_for_new_space;
Ben Murdoch097c5b22016-05-18 11:27:45 +01004753 __ bind(&allocate);
Ben Murdoch61f157c2016-09-16 13:49:30 +01004754 __ Branch(&too_big_for_new_space, gt, a5,
4755 Operand(Page::kMaxRegularHeapObjectSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004756 {
4757 FrameScope scope(masm, StackFrame::INTERNAL);
4758 __ SmiTag(a0);
Ben Murdoch61f157c2016-09-16 13:49:30 +01004759 __ SmiTag(a5);
4760 __ Push(a0, a2, a5);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004761 __ CallRuntime(Runtime::kAllocateInNewSpace);
4762 __ Pop(a0, a2);
4763 __ SmiUntag(a0);
4764 }
4765 __ jmp(&done_allocate);
Ben Murdoch61f157c2016-09-16 13:49:30 +01004766
4767 // Fall back to %NewStrictArguments.
4768 __ bind(&too_big_for_new_space);
4769 __ Push(a1);
4770 __ TailCallRuntime(Runtime::kNewStrictArguments);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004771 }
4772}
4773
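// Illustrative sketch, not part of the stub: with an arguments adaptor frame
// present, the rest parameter count computed above is simply the number of
// actual arguments in excess of the formal parameter count.
namespace rest_count_sketch {
inline int RestParameterCount(int actual_arguments, int formal_parameters) {
  int excess = actual_arguments - formal_parameters;
  return excess > 0 ? excess : 0;  // <= 0 takes the empty-array path
}
}  // namespace rest_count_sketch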
4774
4775void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
4776 // ----------- S t a t e -------------
4777 // -- a1 : function
4778 // -- cp : context
4779 // -- fp : frame pointer
4780 // -- ra : return address
4781 // -----------------------------------
4782 __ AssertFunction(a1);
4783
Ben Murdochc5610432016-08-08 18:44:38 +01004784 // Make t0 point to the JavaScript frame.
4785 __ mov(t0, fp);
4786 if (skip_stub_frame()) {
4787 // For Ignition we need to skip the handler/stub frame to reach the
4788 // JavaScript frame for the function.
4789 __ ld(t0, MemOperand(t0, StandardFrameConstants::kCallerFPOffset));
4790 }
4791 if (FLAG_debug_code) {
4792 Label ok;
4793 __ ld(a3, MemOperand(t0, StandardFrameConstants::kFunctionOffset));
4794 __ Branch(&ok, eq, a1, Operand(a3));
4795 __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
4796 __ bind(&ok);
4797 }
4798
Ben Murdoch097c5b22016-05-18 11:27:45 +01004799 // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
4800 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
4801 __ lw(a2,
4802 FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01004803 __ Lsa(a3, t0, a2, kPointerSizeLog2);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004804 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset));
4805 __ SmiTag(a2);
4806
4807 // a1 : function
4808 // a2 : number of parameters (tagged)
4809 // a3 : parameters pointer
Ben Murdochc5610432016-08-08 18:44:38 +01004810 // t0 : Javascript frame pointer
Ben Murdoch097c5b22016-05-18 11:27:45 +01004811 // Registers used over whole function:
4812 // a5 : arguments count (tagged)
4813 // a6 : mapped parameter count (tagged)
4814
4815 // Check if the calling frame is an arguments adaptor frame.
4816 Label adaptor_frame, try_allocate, runtime;
Ben Murdochc5610432016-08-08 18:44:38 +01004817 __ ld(a4, MemOperand(t0, StandardFrameConstants::kCallerFPOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01004818 __ ld(a0, MemOperand(a4, CommonFrameConstants::kContextOrFrameTypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004819 __ Branch(&adaptor_frame, eq, a0,
4820 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4821
4822 // No adaptor, parameter count = argument count.
4823 __ mov(a5, a2);
4824 __ Branch(USE_DELAY_SLOT, &try_allocate);
4825 __ mov(a6, a2); // In delay slot.
4826
4827 // We have an adaptor frame. Patch the parameters pointer.
4828 __ bind(&adaptor_frame);
4829 __ ld(a5, MemOperand(a4, ArgumentsAdaptorFrameConstants::kLengthOffset));
4830 __ SmiScale(t2, a5, kPointerSizeLog2);
4831 __ Daddu(a4, a4, Operand(t2));
4832 __ Daddu(a3, a4, Operand(StandardFrameConstants::kCallerSPOffset));
4833
4834 // a5 = argument count (tagged)
4835 // a6 = parameter count (tagged)
4836 // Compute the mapped parameter count = min(a6, a5) in a6.
4837 __ mov(a6, a2);
4838 __ Branch(&try_allocate, le, a6, Operand(a5));
4839 __ mov(a6, a5);
4840
4841 __ bind(&try_allocate);
4842
4843 // Compute the sizes of backing store, parameter map, and arguments object.
4844 // 1. Parameter map, has 2 extra words containing context and backing store.
4845 const int kParameterMapHeaderSize =
4846 FixedArray::kHeaderSize + 2 * kPointerSize;
4847 // If there are no mapped parameters, we do not need the parameter_map.
4848 Label param_map_size;
4849 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
4850 __ Branch(USE_DELAY_SLOT, &param_map_size, eq, a6, Operand(zero_reg));
4851 __ mov(t1, zero_reg); // In delay slot: param map size = 0 when a6 == 0.
4852 __ SmiScale(t1, a6, kPointerSizeLog2);
4853 __ daddiu(t1, t1, kParameterMapHeaderSize);
4854 __ bind(&param_map_size);
4855
4856 // 2. Backing store.
4857 __ SmiScale(t2, a5, kPointerSizeLog2);
4858 __ Daddu(t1, t1, Operand(t2));
4859 __ Daddu(t1, t1, Operand(FixedArray::kHeaderSize));
4860
4861 // 3. Arguments object.
4862 __ Daddu(t1, t1, Operand(JSSloppyArgumentsObject::kSize));
4863
4864 // Do the allocation of all three objects in one go.
Ben Murdochc5610432016-08-08 18:44:38 +01004865 __ Allocate(t1, v0, t1, a4, &runtime, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004866
4867 // v0 = address of new object(s) (tagged)
4868 // a2 = argument count (smi-tagged)
4869 // Get the arguments boilerplate from the current native context into a4.
4870 const int kNormalOffset =
4871 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
4872 const int kAliasedOffset =
4873 Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
4874
4875 __ ld(a4, NativeContextMemOperand());
4876 Label skip2_ne, skip2_eq;
4877 __ Branch(&skip2_ne, ne, a6, Operand(zero_reg));
4878 __ ld(a4, MemOperand(a4, kNormalOffset));
4879 __ bind(&skip2_ne);
4880
4881 __ Branch(&skip2_eq, eq, a6, Operand(zero_reg));
4882 __ ld(a4, MemOperand(a4, kAliasedOffset));
4883 __ bind(&skip2_eq);
4884
4885 // v0 = address of new object (tagged)
4886 // a2 = argument count (smi-tagged)
4887 // a4 = address of arguments map (tagged)
4888 // a6 = mapped parameter count (tagged)
4889 __ sd(a4, FieldMemOperand(v0, JSObject::kMapOffset));
4890 __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex);
4891 __ sd(t1, FieldMemOperand(v0, JSObject::kPropertiesOffset));
4892 __ sd(t1, FieldMemOperand(v0, JSObject::kElementsOffset));
4893
4894 // Set up the callee in-object property.
4895 __ AssertNotSmi(a1);
4896 __ sd(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset));
4897
4898 // Use the length (smi tagged) and set that as an in-object property too.
4899 __ AssertSmi(a5);
4900 __ sd(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
4901
4902 // Set up the elements pointer in the allocated arguments object.
4903 // If we allocated a parameter map, a4 will point there, otherwise
4904 // it will point to the backing store.
4905 __ Daddu(a4, v0, Operand(JSSloppyArgumentsObject::kSize));
4906 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
4907
4908 // v0 = address of new object (tagged)
4909 // a2 = argument count (tagged)
4910 // a4 = address of parameter map or backing store (tagged)
4911 // a6 = mapped parameter count (tagged)
4912 // Initialize parameter map. If there are no mapped arguments, we're done.
4913 Label skip_parameter_map;
4914 Label skip3;
4915 __ Branch(&skip3, ne, a6, Operand(Smi::FromInt(0)));
4916 // Move backing store address to a1, because it is
4917 // expected there when filling in the unmapped arguments.
4918 __ mov(a1, a4);
4919 __ bind(&skip3);
4920
4921 __ Branch(&skip_parameter_map, eq, a6, Operand(Smi::FromInt(0)));
4922
4923 __ LoadRoot(a5, Heap::kSloppyArgumentsElementsMapRootIndex);
4924 __ sd(a5, FieldMemOperand(a4, FixedArray::kMapOffset));
4925 __ Daddu(a5, a6, Operand(Smi::FromInt(2)));
4926 __ sd(a5, FieldMemOperand(a4, FixedArray::kLengthOffset));
4927 __ sd(cp, FieldMemOperand(a4, FixedArray::kHeaderSize + 0 * kPointerSize));
4928 __ SmiScale(t2, a6, kPointerSizeLog2);
4929 __ Daddu(a5, a4, Operand(t2));
4930 __ Daddu(a5, a5, Operand(kParameterMapHeaderSize));
4931 __ sd(a5, FieldMemOperand(a4, FixedArray::kHeaderSize + 1 * kPointerSize));
4932
4933 // Copy the parameter slots and the holes in the arguments.
4934 // We need to fill in mapped_parameter_count slots. They index the context,
4935 // where parameters are stored in reverse order, at
4936 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
4937 // The mapped parameters thus need to get indices
4938 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
4939 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
4940 // We loop from right to left.
4941 Label parameters_loop, parameters_test;
4942 __ mov(a5, a6);
4943 __ Daddu(t1, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
4944 __ Dsubu(t1, t1, Operand(a6));
4945 __ LoadRoot(a7, Heap::kTheHoleValueRootIndex);
4946 __ SmiScale(t2, a5, kPointerSizeLog2);
4947 __ Daddu(a1, a4, Operand(t2));
4948 __ Daddu(a1, a1, Operand(kParameterMapHeaderSize));
4949
4950 // a1 = address of backing store (tagged)
4951 // a4 = address of parameter map (tagged)
4952 // a0 = temporary scratch (a.o., for address calculation)
4953 // t1 = loop variable (tagged)
4954 // a7 = the hole value
4955 __ jmp(&parameters_test);
4956
4957 __ bind(&parameters_loop);
4958 __ Dsubu(a5, a5, Operand(Smi::FromInt(1)));
4959 __ SmiScale(a0, a5, kPointerSizeLog2);
4960 __ Daddu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
4961 __ Daddu(t2, a4, a0);
4962 __ sd(t1, MemOperand(t2));
4963 __ Dsubu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
4964 __ Daddu(t2, a1, a0);
4965 __ sd(a7, MemOperand(t2));
4966 __ Daddu(t1, t1, Operand(Smi::FromInt(1)));
4967 __ bind(&parameters_test);
4968 __ Branch(&parameters_loop, ne, a5, Operand(Smi::FromInt(0)));
4969
4970 // Restore a5 = argument count (tagged).
4971 __ ld(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
4972
4973 __ bind(&skip_parameter_map);
4974 // v0 = address of new object (tagged)
4975 // a1 = address of backing store (tagged)
4976 // a5 = argument count (tagged)
4977 // a6 = mapped parameter count (tagged)
4978 // t1 = scratch
4979 // Copy arguments header and remaining slots (if there are any).
4980 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
4981 __ sd(t1, FieldMemOperand(a1, FixedArray::kMapOffset));
4982 __ sd(a5, FieldMemOperand(a1, FixedArray::kLengthOffset));
4983
4984 Label arguments_loop, arguments_test;
4985 __ SmiScale(t2, a6, kPointerSizeLog2);
4986 __ Dsubu(a3, a3, Operand(t2));
4987 __ jmp(&arguments_test);
4988
4989 __ bind(&arguments_loop);
4990 __ Dsubu(a3, a3, Operand(kPointerSize));
4991 __ ld(a4, MemOperand(a3, 0));
4992 __ SmiScale(t2, a6, kPointerSizeLog2);
4993 __ Daddu(t1, a1, Operand(t2));
4994 __ sd(a4, FieldMemOperand(t1, FixedArray::kHeaderSize));
4995 __ Daddu(a6, a6, Operand(Smi::FromInt(1)));
4996
4997 __ bind(&arguments_test);
4998 __ Branch(&arguments_loop, lt, a6, Operand(a5));
4999
5000 // Return.
5001 __ Ret();
5002
5003 // Do the runtime call to allocate the arguments object.
5004 // a5 = argument count (tagged)
5005 __ bind(&runtime);
5006 __ Push(a1, a3, a5);
5007 __ TailCallRuntime(Runtime::kNewSloppyArguments);
5008}
5009
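// Illustrative sketch, not part of the stub: the &try_allocate block above
// sums three parts: an optional parameter map (two extra words for context
// and backing store), the backing store, and the arguments object itself.
// The concrete byte values below are assumptions for a 64-bit build.
namespace sloppy_args_size_sketch {
inline int AllocationSizeInBytes(int argument_count, int mapped_count) {
  const int kPointerSize = 8;
  const int kFixedArrayHeader = 2 * kPointerSize;  // map + length
  const int kParameterMapHeader = kFixedArrayHeader + 2 * kPointerSize;
  const int kSloppyArgumentsObjectSize = 5 * kPointerSize;
  int size = 0;
  if (mapped_count > 0) {
    size += kParameterMapHeader + mapped_count * kPointerSize;
  }
  size += kFixedArrayHeader + argument_count * kPointerSize;  // backing store
  return size + kSloppyArgumentsObjectSize;
}
}  // namespace sloppy_args_size_sketch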
5010
5011void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
5012 // ----------- S t a t e -------------
5013 // -- a1 : function
5014 // -- cp : context
5015 // -- fp : frame pointer
5016 // -- ra : return address
5017 // -----------------------------------
5018 __ AssertFunction(a1);
5019
Ben Murdochc5610432016-08-08 18:44:38 +01005020 // Make a2 point to the JavaScript frame.
5021 __ mov(a2, fp);
5022 if (skip_stub_frame()) {
5023 // For Ignition we need to skip the handler/stub frame to reach the
5024 // JavaScript frame for the function.
Ben Murdoch097c5b22016-05-18 11:27:45 +01005025 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01005026 }
5027 if (FLAG_debug_code) {
5028 Label ok;
Ben Murdochda12d292016-06-02 14:46:10 +01005029 __ ld(a3, MemOperand(a2, StandardFrameConstants::kFunctionOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01005030 __ Branch(&ok, eq, a1, Operand(a3));
5031 __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
5032 __ bind(&ok);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005033 }
5034
5035 // Check if we have an arguments adaptor frame below the function frame.
5036 Label arguments_adaptor, arguments_done;
5037 __ ld(a3, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01005038 __ ld(a0, MemOperand(a3, CommonFrameConstants::kContextOrFrameTypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01005039 __ Branch(&arguments_adaptor, eq, a0,
5040 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
5041 {
Ben Murdoch61f157c2016-09-16 13:49:30 +01005042 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01005043 __ lw(a0,
Ben Murdoch61f157c2016-09-16 13:49:30 +01005044 FieldMemOperand(a4, SharedFunctionInfo::kFormalParameterCountOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01005045 __ Dlsa(a2, a2, a0, kPointerSizeLog2);
5046 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
5047 1 * kPointerSize));
5048 }
5049 __ Branch(&arguments_done);
5050 __ bind(&arguments_adaptor);
5051 {
5052 __ SmiLoadUntag(
5053 a0, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
5054 __ Dlsa(a2, a3, a0, kPointerSizeLog2);
5055 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
5056 1 * kPointerSize));
5057 }
5058 __ bind(&arguments_done);
5059
5060 // ----------- S t a t e -------------
5061 // -- cp : context
5062 // -- a0 : number of rest parameters
Ben Murdoch61f157c2016-09-16 13:49:30 +01005063 // -- a1 : function
Ben Murdoch097c5b22016-05-18 11:27:45 +01005064 // -- a2 : pointer to first rest parameter
5065 // -- ra : return address
5066 // -----------------------------------
5067
5068 // Allocate space for the rest parameter array plus the backing store.
5069 Label allocate, done_allocate;
Ben Murdoch61f157c2016-09-16 13:49:30 +01005070 __ li(a5, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
5071 __ Dlsa(a5, a5, a0, kPointerSizeLog2);
5072 __ Allocate(a5, v0, a3, a4, &allocate, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005073 __ bind(&done_allocate);
5074
5075 // Compute arguments.length in a4.
5076 __ SmiTag(a4, a0);
5077
5078 // Setup the elements array in v0.
5079 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
5080 __ sd(at, FieldMemOperand(v0, FixedArray::kMapOffset));
5081 __ sd(a4, FieldMemOperand(v0, FixedArray::kLengthOffset));
5082 __ Daddu(a3, v0, Operand(FixedArray::kHeaderSize));
5083 {
5084 Label loop, done_loop;
5085 __ Dlsa(a1, a3, a0, kPointerSizeLog2);
5086 __ bind(&loop);
5087 __ Branch(&done_loop, eq, a1, Operand(a3));
5088 __ ld(at, MemOperand(a2, 0 * kPointerSize));
5089 __ sd(at, FieldMemOperand(a3, 0 * kPointerSize));
5090 __ Dsubu(a2, a2, Operand(1 * kPointerSize));
5091 __ Daddu(a3, a3, Operand(1 * kPointerSize));
5092 __ Branch(&loop);
5093 __ bind(&done_loop);
5094 }
5095
5096 // Setup the strict arguments object in a3.
5097 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, at);
5098 __ sd(at, FieldMemOperand(a3, JSStrictArgumentsObject::kMapOffset));
5099 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
5100 __ sd(at, FieldMemOperand(a3, JSStrictArgumentsObject::kPropertiesOffset));
5101 __ sd(v0, FieldMemOperand(a3, JSStrictArgumentsObject::kElementsOffset));
5102 __ sd(a4, FieldMemOperand(a3, JSStrictArgumentsObject::kLengthOffset));
5103 STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
5104 __ Ret(USE_DELAY_SLOT);
5105 __ mov(v0, a3); // In delay slot
5106
Ben Murdoch61f157c2016-09-16 13:49:30 +01005107 // Fall back to %AllocateInNewSpace (if not too big).
5108 Label too_big_for_new_space;
Ben Murdoch097c5b22016-05-18 11:27:45 +01005109 __ bind(&allocate);
Ben Murdoch61f157c2016-09-16 13:49:30 +01005110 __ Branch(&too_big_for_new_space, gt, a5,
5111 Operand(Page::kMaxRegularHeapObjectSize));
Ben Murdoch097c5b22016-05-18 11:27:45 +01005112 {
5113 FrameScope scope(masm, StackFrame::INTERNAL);
5114 __ SmiTag(a0);
Ben Murdoch61f157c2016-09-16 13:49:30 +01005115 __ SmiTag(a5);
5116 __ Push(a0, a2, a5);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005117 __ CallRuntime(Runtime::kAllocateInNewSpace);
5118 __ Pop(a0, a2);
5119 __ SmiUntag(a0);
5120 }
5121 __ jmp(&done_allocate);
Ben Murdoch097c5b22016-05-18 11:27:45 +01005122
Ben Murdoch61f157c2016-09-16 13:49:30 +01005123 // Fall back to %NewStrictArguments.
5124 __ bind(&too_big_for_new_space);
5125 __ Push(a1);
5126 __ TailCallRuntime(Runtime::kNewStrictArguments);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005127}
5128
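// Illustrative sketch, not part of the stub: the copy loops in the two stubs
// above walk the stack arguments downwards while filling the new FixedArray
// upwards. Plain C++ equivalent, pointer names hypothetical:
namespace args_copy_sketch {
inline void CopyArguments(void* const* highest_arg, void** destination,
                          int count) {
  // highest_arg points at the argument closest to the caller's SP; stepping
  // it down mirrors the Dsubu(a2, ...) / Daddu(a3, ...) pair in the loop.
  for (int i = 0; i < count; ++i) destination[i] = highest_arg[-i];
}
}  // namespace args_copy_sketch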
5129
5130void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
5131 Register context_reg = cp;
5132 Register slot_reg = a2;
5133 Register value_reg = a0;
5134 Register cell_reg = a4;
5135 Register cell_value_reg = a5;
5136 Register cell_details_reg = a6;
5137 Label fast_heapobject_case, fast_smi_case, slow_case;
5138
5139 if (FLAG_debug_code) {
5140 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
5141 __ Check(ne, kUnexpectedValue, value_reg, Operand(at));
5142 }
5143
5144 // Go up context chain to the script context.
5145 for (int i = 0; i < depth(); ++i) {
5146 __ ld(cell_reg, ContextMemOperand(context_reg, Context::PREVIOUS_INDEX));
5147 context_reg = cell_reg;
5148 }
5149
5150 // Load the PropertyCell at the specified slot.
Ben Murdoch097c5b22016-05-18 11:27:45 +01005151 __ Dlsa(at, context_reg, slot_reg, kPointerSizeLog2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005152 __ ld(cell_reg, ContextMemOperand(at, 0));
5153
5154 // Load PropertyDetails for the cell (actually only the cell_type and kind).
5155 __ ld(cell_details_reg,
5156 FieldMemOperand(cell_reg, PropertyCell::kDetailsOffset));
5157 __ SmiUntag(cell_details_reg);
5158 __ And(cell_details_reg, cell_details_reg,
5159 PropertyDetails::PropertyCellTypeField::kMask |
5160 PropertyDetails::KindField::kMask |
5161 PropertyDetails::kAttributesReadOnlyMask);
5162
5163 // Check if PropertyCell holds mutable data.
5164 Label not_mutable_data;
5165 __ Branch(&not_mutable_data, ne, cell_details_reg,
5166 Operand(PropertyDetails::PropertyCellTypeField::encode(
5167 PropertyCellType::kMutable) |
5168 PropertyDetails::KindField::encode(kData)));
5169 __ JumpIfSmi(value_reg, &fast_smi_case);
5170 __ bind(&fast_heapobject_case);
5171 __ sd(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
5172 __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
5173 cell_details_reg, kRAHasNotBeenSaved, kDontSaveFPRegs,
5174 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
5175 // RecordWriteField clobbers the value register, so we need to reload.
5176 __ Ret(USE_DELAY_SLOT);
5177 __ ld(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
5178 __ bind(&not_mutable_data);
5179
5180 // Check if PropertyCell value matches the new value (relevant for Constant,
5181 // ConstantType and Undefined cells).
5182 Label not_same_value;
5183 __ ld(cell_value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
5184 __ Branch(&not_same_value, ne, value_reg, Operand(cell_value_reg));
5185 // Make sure the PropertyCell is not marked READ_ONLY.
5186 __ And(at, cell_details_reg, PropertyDetails::kAttributesReadOnlyMask);
5187 __ Branch(&slow_case, ne, at, Operand(zero_reg));
5188 if (FLAG_debug_code) {
5189 Label done;
5190 // This can only be true for Constant, ConstantType and Undefined cells,
5191 // because we never store the_hole via this stub.
5192 __ Branch(&done, eq, cell_details_reg,
5193 Operand(PropertyDetails::PropertyCellTypeField::encode(
5194 PropertyCellType::kConstant) |
5195 PropertyDetails::KindField::encode(kData)));
5196 __ Branch(&done, eq, cell_details_reg,
5197 Operand(PropertyDetails::PropertyCellTypeField::encode(
5198 PropertyCellType::kConstantType) |
5199 PropertyDetails::KindField::encode(kData)));
5200 __ Check(eq, kUnexpectedValue, cell_details_reg,
5201 Operand(PropertyDetails::PropertyCellTypeField::encode(
5202 PropertyCellType::kUndefined) |
5203 PropertyDetails::KindField::encode(kData)));
5204 __ bind(&done);
5205 }
5206 __ Ret();
5207 __ bind(&not_same_value);
5208
5209 // Check if PropertyCell contains data with constant type (and is not
5210 // READ_ONLY).
5211 __ Branch(&slow_case, ne, cell_details_reg,
5212 Operand(PropertyDetails::PropertyCellTypeField::encode(
5213 PropertyCellType::kConstantType) |
5214 PropertyDetails::KindField::encode(kData)));
5215
5216 // Now either both old and new values must be SMIs or both must be heap
5217 // objects with same map.
5218 Label value_is_heap_object;
5219 __ JumpIfNotSmi(value_reg, &value_is_heap_object);
5220 __ JumpIfNotSmi(cell_value_reg, &slow_case);
5221 // Old and new values are SMIs, no need for a write barrier here.
5222 __ bind(&fast_smi_case);
5223 __ Ret(USE_DELAY_SLOT);
5224 __ sd(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
5225 __ bind(&value_is_heap_object);
5226 __ JumpIfSmi(cell_value_reg, &slow_case);
5227 Register cell_value_map_reg = cell_value_reg;
5228 __ ld(cell_value_map_reg,
5229 FieldMemOperand(cell_value_reg, HeapObject::kMapOffset));
5230 __ Branch(&fast_heapobject_case, eq, cell_value_map_reg,
5231 FieldMemOperand(value_reg, HeapObject::kMapOffset));
5232
5233 // Fallback to the runtime.
5234 __ bind(&slow_case);
5235 __ SmiTag(slot_reg);
5236 __ Push(slot_reg, value_reg);
5237 __ TailCallRuntime(is_strict(language_mode())
5238 ? Runtime::kStoreGlobalViaContext_Strict
5239 : Runtime::kStoreGlobalViaContext_Sloppy);
5240}
5241
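// Illustrative sketch, not part of the stub: the cell checks above untag the
// details word, mask it down to cell type, kind, and the read-only bit, and
// compare against fully encoded constants. Bit positions are hypothetical.
namespace property_details_sketch {
inline bool DetailsMatch(unsigned details, unsigned relevant_mask,
                         unsigned expected_encoding) {
  return (details & relevant_mask) == expected_encoding;
}
}  // namespace property_details_sketch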
5242
5243static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
5244 int64_t offset = (ref0.address() - ref1.address());
5245 DCHECK(static_cast<int>(offset) == offset);
5246 return static_cast<int>(offset);
5247}
5248
5249
5250// Calls an API function. Allocates a HandleScope, extracts the returned
5251// value from the handle, and propagates exceptions. Restores the context.
5252// stack_space - space to be unwound on exit (includes the JS call arguments
5253// space and the additional space allocated for the fast call).
5254static void CallApiFunctionAndReturn(
5255 MacroAssembler* masm, Register function_address,
5256 ExternalReference thunk_ref, int stack_space, int32_t stack_space_offset,
5257 MemOperand return_value_operand, MemOperand* context_restore_operand) {
5258 Isolate* isolate = masm->isolate();
5259 ExternalReference next_address =
5260 ExternalReference::handle_scope_next_address(isolate);
5261 const int kNextOffset = 0;
5262 const int kLimitOffset = AddressOffset(
5263 ExternalReference::handle_scope_limit_address(isolate), next_address);
5264 const int kLevelOffset = AddressOffset(
5265 ExternalReference::handle_scope_level_address(isolate), next_address);
5266
5267 DCHECK(function_address.is(a1) || function_address.is(a2));
5268
5269 Label profiler_disabled;
5270 Label end_profiler_check;
5271 __ li(t9, Operand(ExternalReference::is_profiling_address(isolate)));
5272 __ lb(t9, MemOperand(t9, 0));
5273 __ Branch(&profiler_disabled, eq, t9, Operand(zero_reg));
5274
5275 // Additional parameter is the address of the actual callback.
5276 __ li(t9, Operand(thunk_ref));
5277 __ jmp(&end_profiler_check);
5278
5279 __ bind(&profiler_disabled);
5280 __ mov(t9, function_address);
5281 __ bind(&end_profiler_check);
5282
5283 // Allocate HandleScope in callee-save registers.
5284 __ li(s3, Operand(next_address));
5285 __ ld(s0, MemOperand(s3, kNextOffset));
5286 __ ld(s1, MemOperand(s3, kLimitOffset));
5287 __ lw(s2, MemOperand(s3, kLevelOffset));
5288 __ Addu(s2, s2, Operand(1));
5289 __ sw(s2, MemOperand(s3, kLevelOffset));
5290
5291 if (FLAG_log_timer_events) {
5292 FrameScope frame(masm, StackFrame::MANUAL);
5293 __ PushSafepointRegisters();
5294 __ PrepareCallCFunction(1, a0);
5295 __ li(a0, Operand(ExternalReference::isolate_address(isolate)));
5296 __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
5297 1);
5298 __ PopSafepointRegisters();
5299 }
5300
5301 // Native call returns to the DirectCEntry stub which redirects to the
5302 // return address pushed on stack (could have moved after GC).
5303 // DirectCEntry stub itself is generated early and never moves.
5304 DirectCEntryStub stub(isolate);
5305 stub.GenerateCall(masm, t9);
5306
5307 if (FLAG_log_timer_events) {
5308 FrameScope frame(masm, StackFrame::MANUAL);
5309 __ PushSafepointRegisters();
5310 __ PrepareCallCFunction(1, a0);
5311 __ li(a0, Operand(ExternalReference::isolate_address(isolate)));
5312 __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
5313 1);
5314 __ PopSafepointRegisters();
5315 }
5316
5317 Label promote_scheduled_exception;
5318 Label delete_allocated_handles;
5319 Label leave_exit_frame;
5320 Label return_value_loaded;
5321
5322 // Load value from ReturnValue.
5323 __ ld(v0, return_value_operand);
5324 __ bind(&return_value_loaded);
5325
5326 // No more valid handles (the result handle was the last one). Restore
5327 // previous handle scope.
5328 __ sd(s0, MemOperand(s3, kNextOffset));
5329 if (__ emit_debug_code()) {
5330 __ lw(a1, MemOperand(s3, kLevelOffset));
5331 __ Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2));
5332 }
5333 __ Subu(s2, s2, Operand(1));
5334 __ sw(s2, MemOperand(s3, kLevelOffset));
5335 __ ld(at, MemOperand(s3, kLimitOffset));
5336 __ Branch(&delete_allocated_handles, ne, s1, Operand(at));
5337
5338 // Leave the API exit frame.
5339 __ bind(&leave_exit_frame);
5340
5341 bool restore_context = context_restore_operand != NULL;
5342 if (restore_context) {
5343 __ ld(cp, *context_restore_operand);
5344 }
5345 if (stack_space_offset != kInvalidStackOffset) {
5346 DCHECK(kCArgsSlotsSize == 0);
5347 __ ld(s0, MemOperand(sp, stack_space_offset));
5348 } else {
5349 __ li(s0, Operand(stack_space));
5350 }
5351 __ LeaveExitFrame(false, s0, !restore_context, NO_EMIT_RETURN,
5352 stack_space_offset != kInvalidStackOffset);
5353
5354 // Check if the function scheduled an exception.
5355 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
5356 __ li(at, Operand(ExternalReference::scheduled_exception_address(isolate)));
5357 __ ld(a5, MemOperand(at));
5358 __ Branch(&promote_scheduled_exception, ne, a4, Operand(a5));
5359
5360 __ Ret();
5361
5362 // Re-throw by promoting a scheduled exception.
5363 __ bind(&promote_scheduled_exception);
5364 __ TailCallRuntime(Runtime::kPromoteScheduledException);
5365
5366 // HandleScope limit has changed. Delete allocated extensions.
5367 __ bind(&delete_allocated_handles);
5368 __ sd(s1, MemOperand(s3, kLimitOffset));
5369 __ mov(s0, v0);
5370 __ mov(a0, v0);
5371 __ PrepareCallCFunction(1, s1);
5372 __ li(a0, Operand(ExternalReference::isolate_address(isolate)));
5373 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate),
5374 1);
5375 __ mov(v0, s0);
5376 __ jmp(&leave_exit_frame);
5377}
5378
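// Illustrative sketch, not part of the stub: CallApiFunctionAndReturn keeps
// the HandleScope data (next, limit, level) in callee-saved registers across
// the C call; on the way out it restores next, decrements level, and runs
// the slow extension cleanup only if the limit moved. Layout hypothetical.
namespace handle_scope_sketch {
struct HandleScopeData {
  void** next;
  void** limit;
  int level;
};
// Returns true when DeleteExtensions must run (the limit changed).
inline bool CloseHandleScope(HandleScopeData* data, void** saved_next,
                             void** saved_limit) {
  data->next = saved_next;
  data->level -= 1;
  return data->limit != saved_limit;
}
}  // namespace handle_scope_sketch
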
Ben Murdochda12d292016-06-02 14:46:10 +01005379void CallApiCallbackStub::Generate(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005380 // ----------- S t a t e -------------
5381 // -- a0 : callee
5382 // -- a4 : call_data
5383 // -- a2 : holder
5384 // -- a1 : api_function_address
5385 // -- cp : context
5386 // --
5387 // -- sp[0] : last argument
5388 // -- ...
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005389 // -- sp[(argc - 1) * 8] : first argument
5390 // -- sp[argc * 8] : receiver
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005391 // -----------------------------------
5392
5393 Register callee = a0;
5394 Register call_data = a4;
5395 Register holder = a2;
5396 Register api_function_address = a1;
5397 Register context = cp;
5398
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005399 typedef FunctionCallbackArguments FCA;
5400
5401 STATIC_ASSERT(FCA::kContextSaveIndex == 6);
5402 STATIC_ASSERT(FCA::kCalleeIndex == 5);
5403 STATIC_ASSERT(FCA::kDataIndex == 4);
5404 STATIC_ASSERT(FCA::kReturnValueOffset == 3);
5405 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
5406 STATIC_ASSERT(FCA::kIsolateIndex == 1);
5407 STATIC_ASSERT(FCA::kHolderIndex == 0);
Ben Murdochc5610432016-08-08 18:44:38 +01005408 STATIC_ASSERT(FCA::kNewTargetIndex == 7);
5409 STATIC_ASSERT(FCA::kArgsLength == 8);
5410
5411 // new target
5412 __ PushRoot(Heap::kUndefinedValueRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005413
5414 // Save context, callee and call data.
5415 __ Push(context, callee, call_data);
Ben Murdochda12d292016-06-02 14:46:10 +01005416 if (!is_lazy()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01005417 // Load context from callee.
5418 __ ld(context, FieldMemOperand(callee, JSFunction::kContextOffset));
5419 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005420
5421 Register scratch = call_data;
Ben Murdochda12d292016-06-02 14:46:10 +01005422 if (!call_data_undefined()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005423 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
5424 }
5425 // Push return value and default return value.
5426 __ Push(scratch, scratch);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005427 __ li(scratch, Operand(ExternalReference::isolate_address(masm->isolate())));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005428 // Push isolate and holder.
5429 __ Push(scratch, holder);

  // Prepare arguments.
  __ mov(scratch, sp);

  // Allocate the v8::Arguments structure in the arguments' space, since
  // it's not controlled by GC.
  const int kApiStackSpace = 3;

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  DCHECK(!api_function_address.is(a0) && !scratch.is(a0));
  // a0 = FunctionCallbackInfo&
  // Arguments are after the return address.
  __ Daddu(a0, sp, Operand(1 * kPointerSize));
  // FunctionCallbackInfo::implicit_args_
  __ sd(scratch, MemOperand(a0, 0 * kPointerSize));
  // FunctionCallbackInfo::values_
  __ Daddu(at, scratch,
           Operand((FCA::kArgsLength - 1 + argc()) * kPointerSize));
  __ sd(at, MemOperand(a0, 1 * kPointerSize));
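  // values_ thus points at the first JS argument, the highest-addressed
  // argument slot: the eight implicit slots sit below it, and the JS
  // arguments were pushed last-to-first (see the state comment at the top).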
  // FunctionCallbackInfo::length_ = argc.
  // Stored as a 32-bit int field; the n64 ABI keeps 32-bit integers within
  // a struct on the stack left-justified.
  __ li(at, Operand(argc()));
  __ sw(at, MemOperand(a0, 2 * kPointerSize));
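  // At this point the exit-frame slots at a0 form the FunctionCallbackInfo
  // the C++ callback will see (a sketch; field names per the stores above):
  //   [a0 +  0]: implicit_args_ -> base of the 8 implicit slots
  //   [a0 +  8]: values_        -> first JS argument
  //   [a0 + 16]: length_        -> argc (32-bit)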

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  AllowExternalCallThatCantCauseGC scope(masm);
  MemOperand context_restore_operand(
      fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
  // Stores return the first JS argument.
  int return_value_offset = 0;
  if (is_store()) {
    return_value_offset = 2 + FCA::kArgsLength;
  } else {
    return_value_offset = 2 + FCA::kReturnValueOffset;
  }
  MemOperand return_value_operand(fp, return_value_offset * kPointerSize);
  int stack_space = 0;
  int32_t stack_space_offset = 3 * kPointerSize;
  stack_space = argc() + FCA::kArgsLength + 1;
  // TODO(adamk): Why are we clobbering this immediately?
  stack_space_offset = kInvalidStackOffset;
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, stack_space,
                           stack_space_offset, return_value_operand,
                           &context_restore_operand);
}

void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // Build v8::PropertyCallbackInfo::args_ array on the stack and push the
  // property name below the exit frame to make GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = a4;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  Register api_function_address = a2;

  // Here and below +1 is for name() pushed after the args_ array.
  typedef PropertyCallbackArguments PCA;
  __ Dsubu(sp, sp, (PCA::kArgsLength + 1) * kPointerSize);
  __ sd(receiver, MemOperand(sp, (PCA::kThisIndex + 1) * kPointerSize));
  __ ld(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset));
  __ sd(scratch, MemOperand(sp, (PCA::kDataIndex + 1) * kPointerSize));
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ sd(scratch, MemOperand(sp, (PCA::kReturnValueOffset + 1) * kPointerSize));
  __ sd(scratch, MemOperand(sp, (PCA::kReturnValueDefaultValueIndex + 1) *
                                    kPointerSize));
  __ li(scratch, Operand(ExternalReference::isolate_address(isolate())));
  __ sd(scratch, MemOperand(sp, (PCA::kIsolateIndex + 1) * kPointerSize));
  __ sd(holder, MemOperand(sp, (PCA::kHolderIndex + 1) * kPointerSize));
  // should_throw_on_error -> false
  DCHECK(Smi::FromInt(0) == nullptr);
  __ sd(zero_reg,
        MemOperand(sp, (PCA::kShouldThrowOnErrorIndex + 1) * kPointerSize));
  __ ld(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));
  __ sd(scratch, MemOperand(sp, 0 * kPointerSize));
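  // Sketch of the args_ array plus name handle just built, lowest address
  // first (indices are the PCA STATIC_ASSERTs above, shifted by one slot
  // for the name handle):
  //   sp[0 * 8]: name
  //   sp[1 * 8]: should_throw_on_error (Smi 0 == false)
  //   sp[2 * 8]: holder
  //   sp[3 * 8]: isolate
  //   sp[4 * 8]: default return value
  //   sp[5 * 8]: return value
  //   sp[6 * 8]: data
  //   sp[7 * 8]: receiver (this)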

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Load address of v8::PropertyCallbackInfo::args_ array and name handle.
  __ mov(a0, sp);                               // a0 = Handle<Name>
  __ Daddu(a1, a0, Operand(1 * kPointerSize));  // a1 = v8::PCI::args_

  const int kApiStackSpace = 1;
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  __ sd(a1, MemOperand(sp, 1 * kPointerSize));
  __ Daddu(a1, sp, Operand(1 * kPointerSize));
  // a1 = v8::PropertyCallbackInfo&
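  // Note: in this revision v8::PropertyCallbackInfo is assumed to carry only
  // its args_ pointer, so filling the single reserved exit-frame slot
  // (kApiStackSpace == 1) is enough to materialize the object a1 points at.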

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  __ ld(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset));
  __ ld(api_function_address,
        FieldMemOperand(scratch, Foreign::kForeignAddressOffset));

  // +3 is to skip prolog, return address and name handle.
  MemOperand return_value_operand(
      fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           kStackUnwindSpace, kInvalidStackOffset,
                           return_value_operand, NULL);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64