// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

#include "src/code-stubs.h"
#include "src/api-arguments.h"
#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/mips/code-stubs-mips.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

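// Shared helpers for the Array/InternalArray constructor stubs below. The
// N-arguments stubs pass a constant_stack_parameter_count of -1, meaning the
// stub takes a variable number of stack arguments, with the actual count
// supplied in a0.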
static void InitializeArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(a0, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kInternalArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(a0, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


void ArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
}

void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
  descriptor->Initialize(a0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}

void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}


#define __ ACCESS_MASM(masm)

static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
                                          Condition cc);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* rhs_not_nan,
                                    Label* slow,
                                    bool strict);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs);


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           a0.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments, adjust sp.
    __ Subu(sp, sp, Operand(param_count * kPointerSize));
    for (int i = 0; i < param_count; ++i) {
      // Store argument to stack.
      __ sw(descriptor.GetRegisterParameter(i),
            MemOperand(sp, (param_count - 1 - i) * kPointerSize));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ Ret();
}


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Label out_of_range, only_low, negate, done;
  Register input_reg = source();
  Register result_reg = destination();

  int double_offset = offset();
  // Account for saved regs if input is sp.
  if (input_reg.is(sp)) double_offset += 3 * kPointerSize;

  Register scratch =
      GetRegisterThatIsNotOneOf(input_reg, result_reg);
  Register scratch2 =
      GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch);
  Register scratch3 =
      GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch, scratch2);
  DoubleRegister double_scratch = kLithiumScratchDouble;

  __ Push(scratch, scratch2, scratch3);

  if (!skip_fastpath()) {
    // Load double input.
    __ ldc1(double_scratch, MemOperand(input_reg, double_offset));

    // Clear cumulative exception flags and save the FCSR.
    __ cfc1(scratch2, FCSR);
    __ ctc1(zero_reg, FCSR);

    // Try a conversion to a signed integer.
    __ Trunc_w_d(double_scratch, double_scratch);
    // Move the converted value into the result register.
    __ mfc1(scratch3, double_scratch);

    // Retrieve and restore the FCSR.
    __ cfc1(scratch, FCSR);
    __ ctc1(scratch2, FCSR);

    // Check for overflow and NaNs.
    __ And(
        scratch, scratch,
        kFCSROverflowFlagMask | kFCSRUnderflowFlagMask
            | kFCSRInvalidOpFlagMask);
    // If we had no exceptions then set result_reg and we are done.
    Label error;
    __ Branch(&error, ne, scratch, Operand(zero_reg));
    __ Move(result_reg, scratch3);
    __ Branch(&done);
    __ bind(&error);
  }

  // Load the double value and perform a manual truncation.
  Register input_high = scratch2;
  Register input_low = scratch3;

  __ lw(input_low,
        MemOperand(input_reg, double_offset + Register::kMantissaOffset));
  __ lw(input_high,
        MemOperand(input_reg, double_offset + Register::kExponentOffset));

  Label normal_exponent, restore_sign;
  // Extract the biased exponent in result.
  __ Ext(result_reg,
         input_high,
         HeapNumber::kExponentShift,
         HeapNumber::kExponentBits);

  // Check for Infinity and NaNs, which should return 0.
  __ Subu(scratch, result_reg, HeapNumber::kExponentMask);
  __ Movz(result_reg, zero_reg, scratch);
  __ Branch(&done, eq, scratch, Operand(zero_reg));

  // Express exponent as delta to (number of mantissa bits + 31).
  __ Subu(result_reg,
          result_reg,
          Operand(HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 31));

  // If the delta is strictly positive, all bits would be shifted away,
  // which means that we can return 0.
  __ Branch(&normal_exponent, le, result_reg, Operand(zero_reg));
  __ mov(result_reg, zero_reg);
  __ Branch(&done);

  __ bind(&normal_exponent);
  const int kShiftBase = HeapNumber::kNonMantissaBitsInTopWord - 1;
  // Calculate shift.
  __ Addu(scratch, result_reg, Operand(kShiftBase + HeapNumber::kMantissaBits));

  // Save the sign.
  Register sign = result_reg;
  result_reg = no_reg;
  __ And(sign, input_high, Operand(HeapNumber::kSignMask));

  // On ARM shifts > 31 bits are valid and will result in zero. On MIPS we need
  // to check for this specific case.
  Label high_shift_needed, high_shift_done;
  __ Branch(&high_shift_needed, lt, scratch, Operand(32));
  __ mov(input_high, zero_reg);
  __ Branch(&high_shift_done);
  __ bind(&high_shift_needed);

  // Set the implicit 1 before the mantissa part in input_high.
  __ Or(input_high,
        input_high,
        Operand(1 << HeapNumber::kMantissaBitsInTopWord));
  // Shift the mantissa bits to the correct position.
  // We don't need to clear non-mantissa bits as they will be shifted away.
  // If they weren't, it would mean that the answer is in the 32-bit range.
  __ sllv(input_high, input_high, scratch);

  __ bind(&high_shift_done);

  // Replace the shifted bits with bits from the lower mantissa word.
  Label pos_shift, shift_done;
  __ li(at, 32);
  __ subu(scratch, at, scratch);
  __ Branch(&pos_shift, ge, scratch, Operand(zero_reg));

  // Negate scratch.
  __ Subu(scratch, zero_reg, scratch);
  __ sllv(input_low, input_low, scratch);
  __ Branch(&shift_done);

  __ bind(&pos_shift);
  __ srlv(input_low, input_low, scratch);

  __ bind(&shift_done);
  __ Or(input_high, input_high, Operand(input_low));
  // Restore sign if necessary.
  __ mov(scratch, sign);
  result_reg = sign;
  sign = no_reg;
  __ Subu(result_reg, zero_reg, input_high);
  __ Movz(result_reg, input_high, scratch);

  __ bind(&done);

  __ Pop(scratch, scratch2, scratch3);
  __ Ret();
}

// Handle the case where the lhs and rhs are the same object.
// Equality is almost reflexive (everything but NaN), so this is a test
// for "identity and not NaN".
static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
                                          Condition cc) {
  Label not_identical;
  Label heap_number, return_equal;
  Register exp_mask_reg = t5;

  __ Branch(&not_identical, ne, a0, Operand(a1));

  __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask));

  // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
  // so we do the second best thing - test it ourselves.
  // The operands are identical and the smi case has already been handled, so
  // neither of them is a Smi. If it's not a heap number, then return equal.
  __ GetObjectType(a0, t4, t4);
  if (cc == less || cc == greater) {
    // Call runtime on identical JSObjects.
    __ Branch(slow, greater, t4, Operand(FIRST_JS_RECEIVER_TYPE));
    // Call runtime on identical symbols since we need to throw a TypeError.
    __ Branch(slow, eq, t4, Operand(SYMBOL_TYPE));
    // Call runtime on identical SIMD values since we must throw a TypeError.
    __ Branch(slow, eq, t4, Operand(SIMD128_VALUE_TYPE));
  } else {
    __ Branch(&heap_number, eq, t4, Operand(HEAP_NUMBER_TYPE));
    // Comparing JS objects with <=, >= is complicated.
    if (cc != eq) {
      __ Branch(slow, greater, t4, Operand(FIRST_JS_RECEIVER_TYPE));
      // Call runtime on identical symbols since we need to throw a TypeError.
      __ Branch(slow, eq, t4, Operand(SYMBOL_TYPE));
      // Call runtime on identical SIMD values since we must throw a TypeError.
      __ Branch(slow, eq, t4, Operand(SIMD128_VALUE_TYPE));
      // Normally here we fall through to return_equal, but undefined is
      // special: (undefined == undefined) == true, but
      // (undefined <= undefined) == false! See ECMAScript 11.8.5.
      if (cc == less_equal || cc == greater_equal) {
        __ Branch(&return_equal, ne, t4, Operand(ODDBALL_TYPE));
        __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
        __ Branch(&return_equal, ne, a0, Operand(t2));
        DCHECK(is_int16(GREATER) && is_int16(LESS));
        __ Ret(USE_DELAY_SLOT);
        if (cc == le) {
          // undefined <= undefined should fail.
          __ li(v0, Operand(GREATER));
        } else {
          // undefined >= undefined should fail.
          __ li(v0, Operand(LESS));
        }
      }
    }
  }

  __ bind(&return_equal);
  DCHECK(is_int16(GREATER) && is_int16(LESS));
  __ Ret(USE_DELAY_SLOT);
  if (cc == less) {
    __ li(v0, Operand(GREATER));  // Things aren't less than themselves.
  } else if (cc == greater) {
    __ li(v0, Operand(LESS));  // Things aren't greater than themselves.
  } else {
    __ mov(v0, zero_reg);  // Things are <=, >=, ==, === themselves.
  }

  // For less and greater we don't have to check for NaN since the result of
  // x < x is false regardless. For the others here is some code to check
  // for NaN.
  if (cc != lt && cc != gt) {
    __ bind(&heap_number);
    // It is a heap number, so return non-equal if it's NaN and equal if it's
    // not NaN.

    // The representation of NaN values has all exponent bits (52..62) set,
    // and not all mantissa bits (0..51) clear.
    // Read top bits of double representation (second word of value).
    __ lw(t2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
    // Test that exponent bits are all set.
    __ And(t3, t2, Operand(exp_mask_reg));
    // If all bits not set (ne cond), then not a NaN, objects are equal.
    __ Branch(&return_equal, ne, t3, Operand(exp_mask_reg));

    // Shift out flag and all exponent bits, retaining only mantissa.
    __ sll(t2, t2, HeapNumber::kNonMantissaBitsInTopWord);
    // Or with all low-bits of mantissa.
    __ lw(t3, FieldMemOperand(a0, HeapNumber::kMantissaOffset));
    __ Or(v0, t3, Operand(t2));
    // For equal we already have the right value in v0: Return zero (equal)
    // if all bits in mantissa are zero (it's an Infinity) and non-zero if
    // not (it's a NaN). For <= and >= we need to load v0 with the failing
    // value if it's a NaN.
    if (cc != eq) {
      // All-zero means Infinity means equal.
      __ Ret(eq, v0, Operand(zero_reg));
      DCHECK(is_int16(GREATER) && is_int16(LESS));
      __ Ret(USE_DELAY_SLOT);
      if (cc == le) {
        __ li(v0, Operand(GREATER));  // NaN <= NaN should fail.
      } else {
        __ li(v0, Operand(LESS));  // NaN >= NaN should fail.
      }
    }
  }
  // No fall through here.

  __ bind(&not_identical);
}

static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* both_loaded_as_doubles,
                                    Label* slow,
                                    bool strict) {
  DCHECK((lhs.is(a0) && rhs.is(a1)) ||
         (lhs.is(a1) && rhs.is(a0)));

  Label lhs_is_smi;
  __ JumpIfSmi(lhs, &lhs_is_smi);
  // Rhs is a Smi.
  // Check whether the non-smi is a heap number.
  __ GetObjectType(lhs, t4, t4);
  if (strict) {
    // If lhs was not a number and rhs was a Smi then strict equality cannot
    // succeed. Return non-equal (lhs is already not zero).
    __ Ret(USE_DELAY_SLOT, ne, t4, Operand(HEAP_NUMBER_TYPE));
    __ mov(v0, lhs);
  } else {
    // Smi compared non-strictly with a non-Smi non-heap-number. Call
    // the runtime.
    __ Branch(slow, ne, t4, Operand(HEAP_NUMBER_TYPE));
  }

  // Rhs is a smi, lhs is a number.
  // Convert smi rhs to double.
  __ sra(at, rhs, kSmiTagSize);
  __ mtc1(at, f14);
  __ cvt_d_w(f14, f14);
  __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset));

  // We now have both loaded as doubles.
  __ jmp(both_loaded_as_doubles);

  __ bind(&lhs_is_smi);
  // Lhs is a Smi. Check whether the non-smi is a heap number.
  __ GetObjectType(rhs, t4, t4);
  if (strict) {
    // If rhs was not a number and lhs was a Smi then strict equality cannot
    // succeed. Return non-equal.
    __ Ret(USE_DELAY_SLOT, ne, t4, Operand(HEAP_NUMBER_TYPE));
    __ li(v0, Operand(1));
  } else {
    // Smi compared non-strictly with a non-Smi non-heap-number. Call
    // the runtime.
    __ Branch(slow, ne, t4, Operand(HEAP_NUMBER_TYPE));
  }

  // Lhs is a smi, rhs is a number.
  // Convert smi lhs to double.
  __ sra(at, lhs, kSmiTagSize);
  __ mtc1(at, f12);
  __ cvt_d_w(f12, f12);
  __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset));
  // Fall through to both_loaded_as_doubles.
}

static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs) {
  // If either operand is a JS object or an oddball value, then they are
  // not equal since their pointers are different.
  // There is no test for undetectability in strict equality.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  Label first_non_object;
  // Get the type of the first operand into a2 and compare it with
  // FIRST_JS_RECEIVER_TYPE.
  __ GetObjectType(lhs, a2, a2);
  __ Branch(&first_non_object, less, a2, Operand(FIRST_JS_RECEIVER_TYPE));

  // Return non-zero.
  Label return_not_equal;
  __ bind(&return_not_equal);
  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(1));

  __ bind(&first_non_object);
  // Check for oddballs: true, false, null, undefined.
  __ Branch(&return_not_equal, eq, a2, Operand(ODDBALL_TYPE));

  __ GetObjectType(rhs, a3, a3);
  __ Branch(&return_not_equal, greater, a3, Operand(FIRST_JS_RECEIVER_TYPE));

  // Check for oddballs: true, false, null, undefined.
  __ Branch(&return_not_equal, eq, a3, Operand(ODDBALL_TYPE));

  // Now that we have the types we might as well check for
  // internalized-internalized.
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ Or(a2, a2, Operand(a3));
  __ And(at, a2, Operand(kIsNotStringMask | kIsNotInternalizedMask));
  __ Branch(&return_not_equal, eq, at, Operand(zero_reg));
}


static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
                                       Register lhs,
                                       Register rhs,
                                       Label* both_loaded_as_doubles,
                                       Label* not_heap_numbers,
                                       Label* slow) {
  __ GetObjectType(lhs, a3, a2);
  __ Branch(not_heap_numbers, ne, a2, Operand(HEAP_NUMBER_TYPE));
  __ lw(a2, FieldMemOperand(rhs, HeapObject::kMapOffset));
  // If first was a heap number & second wasn't, go to slow case.
  __ Branch(slow, ne, a3, Operand(a2));

  // Both are heap numbers. Load them up then jump to the code we have
  // for that.
  __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset));
  __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset));

  __ jmp(both_loaded_as_doubles);
}

506static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
Ben Murdoch097c5b22016-05-18 11:27:45 +0100507 Register lhs, Register rhs,
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000508 Label* possible_strings,
Ben Murdoch097c5b22016-05-18 11:27:45 +0100509 Label* runtime_call) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000510 DCHECK((lhs.is(a0) && rhs.is(a1)) ||
Ben Murdoch257744e2011-11-30 15:57:28 +0000511 (lhs.is(a1) && rhs.is(a0)));
512
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000513 // a2 is object type of rhs.
Ben Murdochda12d292016-06-02 14:46:10 +0100514 Label object_test, return_equal, return_unequal, undetectable;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000515 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
Ben Murdoch257744e2011-11-30 15:57:28 +0000516 __ And(at, a2, Operand(kIsNotStringMask));
517 __ Branch(&object_test, ne, at, Operand(zero_reg));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000518 __ And(at, a2, Operand(kIsNotInternalizedMask));
519 __ Branch(possible_strings, ne, at, Operand(zero_reg));
Ben Murdoch257744e2011-11-30 15:57:28 +0000520 __ GetObjectType(rhs, a3, a3);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100521 __ Branch(runtime_call, ge, a3, Operand(FIRST_NONSTRING_TYPE));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000522 __ And(at, a3, Operand(kIsNotInternalizedMask));
523 __ Branch(possible_strings, ne, at, Operand(zero_reg));
Ben Murdoch257744e2011-11-30 15:57:28 +0000524
Ben Murdoch097c5b22016-05-18 11:27:45 +0100525 // Both are internalized. We already checked they weren't the same pointer so
526 // they are not equal. Return non-equal by returning the non-zero object
527 // pointer in v0.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100528 __ Ret(USE_DELAY_SLOT);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100529 __ mov(v0, a0); // In delay slot.
Ben Murdoch257744e2011-11-30 15:57:28 +0000530
531 __ bind(&object_test);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100532 __ lw(a2, FieldMemOperand(lhs, HeapObject::kMapOffset));
533 __ lw(a3, FieldMemOperand(rhs, HeapObject::kMapOffset));
534 __ lbu(t0, FieldMemOperand(a2, Map::kBitFieldOffset));
535 __ lbu(t1, FieldMemOperand(a3, Map::kBitFieldOffset));
536 __ And(at, t0, Operand(1 << Map::kIsUndetectable));
537 __ Branch(&undetectable, ne, at, Operand(zero_reg));
538 __ And(at, t1, Operand(1 << Map::kIsUndetectable));
539 __ Branch(&return_unequal, ne, at, Operand(zero_reg));
Ben Murdoch257744e2011-11-30 15:57:28 +0000540
Ben Murdoch097c5b22016-05-18 11:27:45 +0100541 __ GetInstanceType(a2, a2);
542 __ Branch(runtime_call, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE));
543 __ GetInstanceType(a3, a3);
544 __ Branch(runtime_call, lt, a3, Operand(FIRST_JS_RECEIVER_TYPE));
545
546 __ bind(&return_unequal);
547 // Return non-equal by returning the non-zero object pointer in v0.
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100548 __ Ret(USE_DELAY_SLOT);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100549 __ mov(v0, a0); // In delay slot.
550
551 __ bind(&undetectable);
552 __ And(at, t1, Operand(1 << Map::kIsUndetectable));
553 __ Branch(&return_unequal, eq, at, Operand(zero_reg));
Ben Murdochda12d292016-06-02 14:46:10 +0100554
555 // If both sides are JSReceivers, then the result is false according to
556 // the HTML specification, which says that only comparisons with null or
557 // undefined are affected by special casing for document.all.
558 __ GetInstanceType(a2, a2);
559 __ Branch(&return_equal, eq, a2, Operand(ODDBALL_TYPE));
560 __ GetInstanceType(a3, a3);
561 __ Branch(&return_unequal, ne, a3, Operand(ODDBALL_TYPE));
562
563 __ bind(&return_equal);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100564 __ Ret(USE_DELAY_SLOT);
565 __ li(v0, Operand(EQUAL)); // In delay slot.
Steve Block44f0eee2011-05-26 01:26:41 +0100566}
567
568
static void CompareICStub_CheckInputType(MacroAssembler* masm, Register input,
                                         Register scratch,
                                         CompareICState::State expected,
                                         Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail,
                DONT_DO_SMI_CHECK);
  }
  // We could be strict about internalized/string here, but as long as
  // hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}

// On entry a1 (lhs) and a0 (rhs) are the values to be compared.
// On exit v0 is 0, positive or negative to indicate the result of
// the comparison.
void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Register lhs = a1;
  Register rhs = a0;
  Condition cc = GetCondition();

  Label miss;
  CompareICStub_CheckInputType(masm, lhs, a2, left(), &miss);
  CompareICStub_CheckInputType(masm, rhs, a3, right(), &miss);

  Label slow;  // Call builtin.
  Label not_smis, both_loaded_as_doubles;

  Label not_two_smis, smi_done;
  __ Or(a2, a1, a0);
  __ JumpIfNotSmi(a2, &not_two_smis);
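  // Both operands are smis: untag them with an arithmetic shift and subtract.
  // The difference is negative, zero or positive exactly when lhs is less
  // than, equal to or greater than rhs, which is the expected result format.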
  __ sra(a1, a1, 1);
  __ sra(a0, a0, 1);
  __ Ret(USE_DELAY_SLOT);
  __ subu(v0, a1, a0);
  __ bind(&not_two_smis);

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Handle the case where the objects are identical. Either returns the answer
  // or goes to slow. Only falls through if the objects were not identical.
  EmitIdenticalObjectComparison(masm, &slow, cc);

  // If either is a Smi (we know that not both are), then they can only
  // be strictly equal if the other is a HeapNumber.
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
  __ And(t2, lhs, Operand(rhs));
  __ JumpIfNotSmi(t2, &not_smis, t0);
  // One operand is a smi. EmitSmiNonsmiComparison generates code that can:
  // 1) Return the answer.
  // 2) Go to slow.
  // 3) Fall through to both_loaded_as_doubles.
  // 4) Jump to rhs_not_nan.
  // In cases 3 and 4 we have found out we were dealing with a number-number
  // comparison and the numbers have been loaded into f12 and f14 as doubles,
  // or in GP registers (a0, a1, a2, a3) depending on the presence of the FPU.
  EmitSmiNonsmiComparison(masm, lhs, rhs,
                          &both_loaded_as_doubles, &slow, strict());

  __ bind(&both_loaded_as_doubles);
  // f12, f14 are the double representations of the left hand side
  // and the right hand side if we have FPU. Otherwise a2, a3 represent
  // left hand side and a0, a1 represent right hand side.
  Label nan;
  __ li(t0, Operand(LESS));
  __ li(t1, Operand(GREATER));
  __ li(t2, Operand(EQUAL));

  // Check if either rhs or lhs is NaN.
  __ BranchF(NULL, &nan, eq, f12, f14);

  // Check if LESS condition is satisfied. If true, move conditionally
  // result to v0.
  if (!IsMipsArchVariant(kMips32r6)) {
    __ c(OLT, D, f12, f14);
    __ Movt(v0, t0);
    // Use the previous check to conditionally store the opposite value
    // (GREATER) to v0. If rhs is equal to lhs, this will be corrected in next
    // check.
    __ Movf(v0, t1);
    // Check if EQUAL condition is satisfied. If true, move conditionally
    // result to v0.
    __ c(EQ, D, f12, f14);
    __ Movt(v0, t2);
  } else {
    Label skip;
    __ BranchF(USE_DELAY_SLOT, &skip, NULL, lt, f12, f14);
    __ mov(v0, t0);  // Return LESS as result.

    __ BranchF(USE_DELAY_SLOT, &skip, NULL, eq, f12, f14);
    __ mov(v0, t2);  // Return EQUAL as result.

    __ mov(v0, t1);  // Return GREATER as result.
    __ bind(&skip);
  }

  __ Ret();

  __ bind(&nan);
  // NaN comparisons always fail.
  // Load whatever we need in v0 to make the comparison fail.
  DCHECK(is_int16(GREATER) && is_int16(LESS));
  __ Ret(USE_DELAY_SLOT);
  if (cc == lt || cc == le) {
    __ li(v0, Operand(GREATER));
  } else {
    __ li(v0, Operand(LESS));
  }


  __ bind(&not_smis);
  // At this point we know we are dealing with two different objects,
  // and neither of them is a Smi. The objects are in lhs and rhs.
  if (strict()) {
    // This returns non-equal for some object types, or falls through if it
    // was not lucky.
    EmitStrictTwoHeapObjectCompare(masm, lhs, rhs);
  }

  Label check_for_internalized_strings;
  Label flat_string_check;
  // Check for heap-number-heap-number comparison. Can jump to slow case,
  // or load both doubles and jump to the code that handles
  // that case. If the inputs are not doubles then jumps to
  // check_for_internalized_strings.
  // In this case a2 will contain the type of lhs.
  EmitCheckForTwoHeapNumbers(masm,
                             lhs,
                             rhs,
                             &both_loaded_as_doubles,
                             &check_for_internalized_strings,
                             &flat_string_check);

  __ bind(&check_for_internalized_strings);
  if (cc == eq && !strict()) {
    // Returns an answer for two internalized strings or two
    // detectable objects.
    // Otherwise jumps to string case or not both strings case.
    // Assumes that a2 is the type of lhs on entry.
    EmitCheckForInternalizedStringsOrObjects(
        masm, lhs, rhs, &flat_string_check, &slow);
  }

  // Check for both being sequential one-byte strings,
  // and inline if that is the case.
  __ bind(&flat_string_check);

  __ JumpIfNonSmisNotBothSequentialOneByteStrings(lhs, rhs, a2, a3, &slow);

  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2,
                      a3);
  if (cc == eq) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, lhs, rhs, a2, a3, t0);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, a2, a3, t0,
                                                    t1);
  }
  // Never falls through to here.

  __ bind(&slow);
  if (cc == eq) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(lhs, rhs);
      __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
    }
    // Turn true into 0 and false into some non-zero value.
    STATIC_ASSERT(EQUAL == 0);
    __ LoadRoot(a0, Heap::kTrueValueRootIndex);
    __ Ret(USE_DELAY_SLOT);
    __ subu(v0, v0, a0);  // In delay slot.
  } else {
    // Prepare for call to builtin. Push object pointers: lhs (a1) first,
    // rhs (a0) second.
    __ Push(lhs, rhs);
    int ncr;  // NaN compare result.
    if (cc == lt || cc == le) {
      ncr = GREATER;
    } else {
      DCHECK(cc == gt || cc == ge);  // Remaining cases.
      ncr = LESS;
    }
    __ li(a0, Operand(Smi::FromInt(ncr)));
    __ push(a0);

    // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
    // tagged as a small integer.
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}

void StoreRegistersStateStub::Generate(MacroAssembler* masm) {
  __ mov(t9, ra);
  __ pop(ra);
  __ PushSafepointRegisters();
  __ Jump(t9);
}


void RestoreRegistersStateStub::Generate(MacroAssembler* masm) {
  __ mov(t9, ra);
  __ pop(ra);
  __ PopSafepointRegisters();
  __ Jump(t9);
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ MultiPush(kJSCallerSaved | ra.bit());
  if (save_doubles()) {
    __ MultiPushFPU(kCallerSavedFPU);
  }
  const int argument_count = 1;
  const int fp_argument_count = 0;
  const Register scratch = a1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
  __ li(a0, Operand(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    __ MultiPopFPU(kCallerSavedFPU);
  }

  __ MultiPop(kJSCallerSaved | ra.bit());
  __ Ret();
}

Ben Murdoch3ef787d2012-04-12 10:51:47 +0100815 const Register base = a1;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000816 const Register exponent = MathPowTaggedDescriptor::exponent();
817 DCHECK(exponent.is(a2));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100818 const Register heapnumbermap = t1;
819 const Register heapnumber = v0;
820 const DoubleRegister double_base = f2;
821 const DoubleRegister double_exponent = f4;
822 const DoubleRegister double_result = f0;
823 const DoubleRegister double_scratch = f6;
824 const FPURegister single_scratch = f8;
825 const Register scratch = t5;
826 const Register scratch2 = t3;
Ben Murdoch257744e2011-11-30 15:57:28 +0000827
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100828 Label call_runtime, done, int_exponent;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000829 if (exponent_type() == ON_STACK) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100830 Label base_is_smi, unpack_exponent;
831 // The exponent and base are supplied as arguments on the stack.
832 // This can only happen if the stub is called from non-optimized code.
833 // Load input parameters from stack to double registers.
Ben Murdoch257744e2011-11-30 15:57:28 +0000834 __ lw(base, MemOperand(sp, 1 * kPointerSize));
835 __ lw(exponent, MemOperand(sp, 0 * kPointerSize));
836
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100837 __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);
Ben Murdoch257744e2011-11-30 15:57:28 +0000838
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100839 __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
Ben Murdoch257744e2011-11-30 15:57:28 +0000840 __ lw(scratch, FieldMemOperand(base, JSObject::kMapOffset));
841 __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100842
Ben Murdochc7cc0282012-03-05 14:35:55 +0000843 __ ldc1(double_base, FieldMemOperand(base, HeapNumber::kValueOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100844 __ jmp(&unpack_exponent);
Ben Murdochc7cc0282012-03-05 14:35:55 +0000845
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100846 __ bind(&base_is_smi);
847 __ mtc1(scratch, single_scratch);
848 __ cvt_d_w(double_base, single_scratch);
849 __ bind(&unpack_exponent);
Ben Murdochc7cc0282012-03-05 14:35:55 +0000850
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100851 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
Ben Murdoch85b71792012-04-11 18:30:58 +0100852
Ben Murdoch85b71792012-04-11 18:30:58 +0100853 __ lw(scratch, FieldMemOperand(exponent, JSObject::kMapOffset));
854 __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));
Ben Murdoch85b71792012-04-11 18:30:58 +0100855 __ ldc1(double_exponent,
856 FieldMemOperand(exponent, HeapNumber::kValueOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000857 } else if (exponent_type() == TAGGED) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100858 // Base is already in double_base.
859 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
Ben Murdoch85b71792012-04-11 18:30:58 +0100860
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100861 __ ldc1(double_exponent,
862 FieldMemOperand(exponent, HeapNumber::kValueOffset));
Ben Murdochc7cc0282012-03-05 14:35:55 +0000863 }
Ben Murdoch85b71792012-04-11 18:30:58 +0100864
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000865 if (exponent_type() != INTEGER) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100866 Label int_exponent_convert;
867 // Detect integer exponents stored as double.
868 __ EmitFPUTruncate(kRoundToMinusInf,
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100869 scratch,
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000870 double_exponent,
871 at,
872 double_scratch,
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100873 scratch2,
874 kCheckForInexactConversion);
875 // scratch2 == 0 means there was no conversion error.
876 __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg));
877
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000878 if (exponent_type() == ON_STACK) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100879 // Detect square root case. Crankshaft detects constant +/-0.5 at
880 // compile time and uses DoMathPowHalf instead. We then skip this check
881 // for non-constant cases of +/-0.5 as these hardly occur.
882 Label not_plus_half;
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100883 // Test for 0.5.
884 __ Move(double_scratch, 0.5);
885 __ BranchF(USE_DELAY_SLOT,
886 &not_plus_half,
887 NULL,
888 ne,
889 double_exponent,
890 double_scratch);
891 // double_scratch can be overwritten in the delay slot.
892 // Calculates square root of base. Check for the special case of
893 // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400894 __ Move(double_scratch, static_cast<double>(-V8_INFINITY));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100895 __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, double_base, double_scratch);
896 __ neg_d(double_result, double_scratch);
897
898 // Add +0 to convert -0 to +0.
899 __ add_d(double_scratch, double_base, kDoubleRegZero);
900 __ sqrt_d(double_result, double_scratch);
901 __ jmp(&done);
902
903 __ bind(&not_plus_half);
904 __ Move(double_scratch, -0.5);
905 __ BranchF(USE_DELAY_SLOT,
906 &call_runtime,
907 NULL,
908 ne,
909 double_exponent,
910 double_scratch);
911 // double_scratch can be overwritten in the delay slot.
912 // Calculates square root of base. Check for the special case of
913 // Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400914 __ Move(double_scratch, static_cast<double>(-V8_INFINITY));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100915 __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, double_base, double_scratch);
916 __ Move(double_result, kDoubleRegZero);
917
918 // Add +0 to convert -0 to +0.
919 __ add_d(double_scratch, double_base, kDoubleRegZero);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400920 __ Move(double_result, 1.);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100921 __ sqrt_d(double_scratch, double_scratch);
922 __ div_d(double_result, double_result, double_scratch);
923 __ jmp(&done);
924 }
925
926 __ push(ra);
927 {
928 AllowExternalCallThatCantCauseGC scope(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000929 __ PrepareCallCFunction(0, 2, scratch2);
930 __ MovToFloatParameters(double_base, double_exponent);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100931 __ CallCFunction(
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000932 ExternalReference::power_double_double_function(isolate()),
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100933 0, 2);
934 }
935 __ pop(ra);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000936 __ MovFromFloatResult(double_result);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100937 __ jmp(&done);
938
939 __ bind(&int_exponent_convert);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100940 }
941
942 // Calculate power with integer exponent.
943 __ bind(&int_exponent);
944
945 // Get two copies of exponent in the registers scratch and exponent.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000946 if (exponent_type() == INTEGER) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100947 __ mov(scratch, exponent);
948 } else {
949 // Exponent has previously been stored into scratch as untagged integer.
950 __ mov(exponent, scratch);
951 }
952
953 __ mov_d(double_scratch, double_base); // Back up base.
954 __ Move(double_result, 1.0);
955
956 // Get absolute value of exponent.
957 Label positive_exponent;
958 __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg));
959 __ Subu(scratch, zero_reg, scratch);
960 __ bind(&positive_exponent);
961
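  // Exponentiation by squaring: on each iteration, multiply the current power
  // of the base into the result when the low exponent bit is set, then square
  // the base and shift the exponent right until it reaches zero.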
  Label while_true, no_carry, loop_end;
  __ bind(&while_true);

  __ And(scratch2, scratch, 1);

  __ Branch(&no_carry, eq, scratch2, Operand(zero_reg));
  __ mul_d(double_result, double_result, double_scratch);
  __ bind(&no_carry);

  __ sra(scratch, scratch, 1);

  __ Branch(&loop_end, eq, scratch, Operand(zero_reg));
  __ mul_d(double_scratch, double_scratch, double_scratch);

  __ Branch(&while_true);

  __ bind(&loop_end);

  __ Branch(&done, ge, exponent, Operand(zero_reg));
  __ Move(double_scratch, 1.0);
  __ div_d(double_result, double_scratch, double_result);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ BranchF(&done, NULL, ne, double_result, kDoubleRegZero);

  // double_exponent may not contain the exponent value if the input was a
  // smi. We set it with exponent value before bailing out.
  __ mtc1(exponent, single_scratch);
  __ cvt_d_w(double_exponent, single_scratch);

  // Returning or bailing out.
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in exponent.
    __ bind(&done);
    __ AllocateHeapNumber(
        heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
    __ sdc1(double_result,
            FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
    DCHECK(heapnumber.is(v0));
    __ DropAndRet(2);
  } else {
    __ push(ra);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(0, 2, scratch);
      __ MovToFloatParameters(double_base, double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()),
          0, 2);
    }
    __ pop(ra);
    __ MovFromFloatResult(double_result);

    __ bind(&done);
    __ Ret();
  }
}


bool CEntryStub::NeedsImmovableCode() {
  return true;
}


void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  StoreRegistersStateStub::GenerateAheadOfTime(isolate);
  RestoreRegistersStateStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
  StoreFastElementStub::GenerateAheadOfTime(isolate);
  TypeofStub::GenerateAheadOfTime(isolate);
}


void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) {
  StoreRegistersStateStub stub(isolate);
  stub.GetCode();
}


void RestoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) {
  RestoreRegistersStateStub stub(isolate);
  stub.GetCode();
}


void CodeStub::GenerateFPStubs(Isolate* isolate) {
  // Generate if not already in cache.
  SaveFPRegsMode mode = kSaveFPRegs;
  CEntryStub(isolate, 1, mode).GetCode();
  StoreBufferOverflowStub(isolate, mode).GetCode();
  isolate->set_fp_stubs_generated(true);
}


void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
  stub.GetCode();
}


void CEntryStub::Generate(MacroAssembler* masm) {
  // Called from JavaScript; parameters are on stack as if calling JS function
  // a0: number of arguments including receiver
  // a1: pointer to builtin function
  // fp: frame pointer (restored after C call)
  // sp: stack pointer (restored as callee's sp after C call)
  // cp: current context (C callee-saved)
  //
  // If argv_in_register():
  // a2: pointer to the first argument

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  if (argv_in_register()) {
    // Move argv into the correct register.
    __ mov(s1, a2);
  } else {
    // Compute the argv pointer in a callee-saved register.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001092 __ Lsa(s1, sp, a0, kPointerSizeLog2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001093 __ Subu(s1, s1, kPointerSize);
1094 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001095
1096 // Enter the exit frame that transitions from JavaScript to C++.
1097 FrameScope scope(masm, StackFrame::MANUAL);
1098 __ EnterExitFrame(save_doubles());
1099
1100 // s0: number of arguments including receiver (C callee-saved)
1101 // s1: pointer to first argument (C callee-saved)
1102 // s2: pointer to builtin function (C callee-saved)
Ben Murdoch257744e2011-11-30 15:57:28 +00001103
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001104 // Prepare arguments for C routine.
1105 // a0 = argc
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001106 __ mov(s0, a0);
1107 __ mov(s2, a1);
Ben Murdoch257744e2011-11-30 15:57:28 +00001108
1109 // We are calling compiled C/C++ code. a0 and a1 hold our two arguments. We
1110 // also need to reserve the 4 argument slots on the stack.
1111
1112 __ AssertStackIsAligned();
1113
Ben Murdoch097c5b22016-05-18 11:27:45 +01001114 int frame_alignment = MacroAssembler::ActivationFrameAlignment();
1115 int frame_alignment_mask = frame_alignment - 1;
1116 int result_stack_size;
1117 if (result_size() <= 2) {
1118 // a0 = argc, a1 = argv, a2 = isolate
1119 __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
1120 __ mov(a1, s1);
1121 result_stack_size = 0;
1122 } else {
1123 DCHECK_EQ(3, result_size());
1124 // Allocate additional space for the result.
1125 result_stack_size =
1126 ((result_size() * kPointerSize) + frame_alignment_mask) &
1127 ~frame_alignment_mask;
1128 __ Subu(sp, sp, Operand(result_stack_size));
1129
1130 // a0 = hidden result argument, a1 = argc, a2 = argv, a3 = isolate.
1131 __ li(a3, Operand(ExternalReference::isolate_address(isolate())));
1132 __ mov(a2, s1);
1133 __ mov(a1, a0);
1134 __ mov(a0, sp);
1135 }
Ben Murdoch257744e2011-11-30 15:57:28 +00001136
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001137 // To let the GC traverse the return address of the exit frames, we need to
1138 // know where the return address is. The CEntryStub is unmovable, so
1139 // we can store the address on the stack to be able to find it again and
1140 // we never have to restore it, because it will not change.
Ben Murdoch257744e2011-11-30 15:57:28 +00001141 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001142 int kNumInstructionsToJump = 4;
Ben Murdoch257744e2011-11-30 15:57:28 +00001143 Label find_ra;
Ben Murdoch257744e2011-11-30 15:57:28 +00001144 // Adjust the value in ra to point to the correct return location, 2nd
1145 // instruction past the real call into C code (the jalr(t9)), and push it.
1146 // This is the return address of the exit frame.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001147 if (kArchVariant >= kMips32r6) {
1148 __ addiupc(ra, kNumInstructionsToJump + 1);
1149 } else {
1150 // This branch-and-link sequence is needed to find the current PC on mips
1151 // before r6, saved to the ra register.
1152 __ bal(&find_ra); // bal exposes branch delay slot.
1153 __ Addu(ra, ra, kNumInstructionsToJump * Instruction::kInstrSize);
1154 }
1155 __ bind(&find_ra);
1156
1157 // This spot was reserved in EnterExitFrame.
1158 __ sw(ra, MemOperand(sp, result_stack_size));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001159 // Stack space reservation moved to the branch delay slot below.
Ben Murdoch257744e2011-11-30 15:57:28 +00001160 // Stack is still aligned.
1161
1162 // Call the C routine.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001163 __ mov(t9, s2); // Function pointer to t9 to conform to ABI for PIC.
1164 __ jalr(t9);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001165 // Set up sp in the delay slot.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001166 __ addiu(sp, sp, -kCArgsSlotsSize);
Ben Murdoch257744e2011-11-30 15:57:28 +00001167 // Make sure the stored 'ra' points to this position.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001168 DCHECK_EQ(kNumInstructionsToJump,
Ben Murdoch257744e2011-11-30 15:57:28 +00001169 masm->InstructionsGeneratedSince(&find_ra));
1170 }
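  // Illustrative note (not part of the original stub): counting from find_ra,
  // the kNumInstructionsToJump == 4 instructions generated above are
  //   sw(ra, ...), mov(t9, s2), jalr(t9), addiu(sp, sp, -kCArgsSlotsSize)
  // so the saved ra points just past the jalr's delay slot, which is exactly
  // where the C call returns to; the DCHECK_EQ against
  // InstructionsGeneratedSince(&find_ra) guards that invariant.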
Ben Murdoch097c5b22016-05-18 11:27:45 +01001171 if (result_size() > 2) {
1172 DCHECK_EQ(3, result_size());
1173 // Read result values stored on stack.
1174 __ lw(a0, MemOperand(v0, 2 * kPointerSize));
1175 __ lw(v1, MemOperand(v0, 1 * kPointerSize));
1176 __ lw(v0, MemOperand(v0, 0 * kPointerSize));
1177 }
1178 // Result returned in v0, v1:v0 or a0:v1:v0 - do not destroy these registers!
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001179
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001180 // Check result for exception sentinel.
1181 Label exception_returned;
1182 __ LoadRoot(t0, Heap::kExceptionRootIndex);
1183 __ Branch(&exception_returned, eq, t0, Operand(v0));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001184
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001185 // Check that there is no pending exception, otherwise we
1186 // should have returned the exception sentinel.
1187 if (FLAG_debug_code) {
1188 Label okay;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001189 ExternalReference pending_exception_address(
1190 Isolate::kPendingExceptionAddress, isolate());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001191 __ li(a2, Operand(pending_exception_address));
1192 __ lw(a2, MemOperand(a2));
1193 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
1194 // Cannot use Check here, as it attempts to generate a call into the runtime.
1195 __ Branch(&okay, eq, t0, Operand(a2));
1196 __ stop("Unexpected pending exception");
1197 __ bind(&okay);
1198 }
Ben Murdoch257744e2011-11-30 15:57:28 +00001199
1200 // Exit C frame and return.
1201 // v0:v1: result
1202 // sp: stack pointer
1203 // fp: frame pointer
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001204 Register argc;
1205 if (argv_in_register()) {
1206 // We don't want to pop arguments so set argc to no_reg.
1207 argc = no_reg;
1208 } else {
1209 // s0: still holds argc (callee-saved).
1210 argc = s0;
1211 }
1212 __ LeaveExitFrame(save_doubles(), argc, true, EMIT_RETURN);
Ben Murdoch257744e2011-11-30 15:57:28 +00001213
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001214 // Handling of exception.
1215 __ bind(&exception_returned);
Ben Murdoch257744e2011-11-30 15:57:28 +00001216
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001217 ExternalReference pending_handler_context_address(
1218 Isolate::kPendingHandlerContextAddress, isolate());
1219 ExternalReference pending_handler_code_address(
1220 Isolate::kPendingHandlerCodeAddress, isolate());
1221 ExternalReference pending_handler_offset_address(
1222 Isolate::kPendingHandlerOffsetAddress, isolate());
1223 ExternalReference pending_handler_fp_address(
1224 Isolate::kPendingHandlerFPAddress, isolate());
1225 ExternalReference pending_handler_sp_address(
1226 Isolate::kPendingHandlerSPAddress, isolate());
Ben Murdoch257744e2011-11-30 15:57:28 +00001227
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001228 // Ask the runtime for help to determine the handler. This will set v0 to
1229 // contain the current pending exception, don't clobber it.
1230 ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
1231 isolate());
1232 {
1233 FrameScope scope(masm, StackFrame::MANUAL);
1234 __ PrepareCallCFunction(3, 0, a0);
1235 __ mov(a0, zero_reg);
1236 __ mov(a1, zero_reg);
1237 __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
1238 __ CallCFunction(find_handler, 3);
1239 }
Ben Murdoch257744e2011-11-30 15:57:28 +00001240
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001241 // Retrieve the handler context, SP and FP.
1242 __ li(cp, Operand(pending_handler_context_address));
1243 __ lw(cp, MemOperand(cp));
1244 __ li(sp, Operand(pending_handler_sp_address));
1245 __ lw(sp, MemOperand(sp));
1246 __ li(fp, Operand(pending_handler_fp_address));
1247 __ lw(fp, MemOperand(fp));
Ben Murdoch257744e2011-11-30 15:57:28 +00001248
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001249 // If the handler is a JS frame, restore the context to the frame. Note that
1250 // the context will be set to (cp == 0) for non-JS frames.
1251 Label zero;
1252 __ Branch(&zero, eq, cp, Operand(zero_reg));
1253 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1254 __ bind(&zero);
Ben Murdoch257744e2011-11-30 15:57:28 +00001255
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001256 // Compute the handler entry address and jump to it.
1257 __ li(a1, Operand(pending_handler_code_address));
1258 __ lw(a1, MemOperand(a1));
1259 __ li(a2, Operand(pending_handler_offset_address));
1260 __ lw(a2, MemOperand(a2));
1261 __ Addu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag));
1262 __ Addu(t9, a1, a2);
1263 __ Jump(t9);
Steve Block44f0eee2011-05-26 01:26:41 +01001264}
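// Illustrative sketch (not part of the original stub): seen from the C++ side,
// the runtime entry invoked above behaves roughly like
//   ObjectPair   Runtime_Foo(int argc, Object** argv, Isolate* isolate);
//   ObjectTriple Runtime_Bar(int argc, Object** argv, Isolate* isolate);
// For the triple case the o32 ABI returns the struct through a hidden pointer
// argument, which is why a0 is pointed at the reserved stack slot and the
// explicit arguments shift to a1..a3 above. Runtime_Foo/Runtime_Bar are
// placeholder names for illustration only.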
1265
1266
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001267void JSEntryStub::Generate(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001268 Label invoke, handler_entry, exit;
1269 Isolate* isolate = masm->isolate();
Ben Murdoch257744e2011-11-30 15:57:28 +00001270
1271 // Registers:
1272 // a0: entry address
1273 // a1: function
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001274 // a2: receiver
Ben Murdoch257744e2011-11-30 15:57:28 +00001275 // a3: argc
1276 //
1277 // Stack:
1278 // 4 args slots
1279 // args
1280
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001281 ProfileEntryHookStub::MaybeCallEntryHook(masm);
1282
Ben Murdoch257744e2011-11-30 15:57:28 +00001283 // Save callee saved registers on the stack.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001284 __ MultiPush(kCalleeSaved | ra.bit());
Ben Murdoch257744e2011-11-30 15:57:28 +00001285
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001286 // Save callee-saved FPU registers.
1287 __ MultiPushFPU(kCalleeSavedFPU);
1288 // Set up the reserved register for 0.0.
1289 __ Move(kDoubleRegZero, 0.0);
Ben Murdoch589d6972011-11-30 16:04:58 +00001290
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001291
Ben Murdoch257744e2011-11-30 15:57:28 +00001292 // Load argv in s0 register.
Ben Murdoch589d6972011-11-30 16:04:58 +00001293 int offset_to_argv = (kNumCalleeSaved + 1) * kPointerSize;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001294 offset_to_argv += kNumCalleeSavedFPU * kDoubleSize;
Ben Murdoch589d6972011-11-30 16:04:58 +00001295
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001296 __ InitializeRootRegister();
Ben Murdoch589d6972011-11-30 16:04:58 +00001297 __ lw(s0, MemOperand(sp, offset_to_argv + kCArgsSlotsSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00001298
1299 // We build an EntryFrame.
1300 __ li(t3, Operand(-1)); // Push a bad frame pointer to fail if it is used.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001301 int marker = type();
Ben Murdoch257744e2011-11-30 15:57:28 +00001302 __ li(t2, Operand(Smi::FromInt(marker)));
1303 __ li(t1, Operand(Smi::FromInt(marker)));
Ben Murdoch589d6972011-11-30 16:04:58 +00001304 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001305 isolate)));
Ben Murdoch257744e2011-11-30 15:57:28 +00001306 __ lw(t0, MemOperand(t0));
1307 __ Push(t3, t2, t1, t0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001308 // Set up frame pointer for the frame to be pushed.
Ben Murdoch257744e2011-11-30 15:57:28 +00001309 __ addiu(fp, sp, -EntryFrameConstants::kCallerFPOffset);
1310
1311 // Registers:
1312 // a0: entry_address
1313 // a1: function
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001314 // a2: receiver_pointer
Ben Murdoch257744e2011-11-30 15:57:28 +00001315 // a3: argc
1316 // s0: argv
1317 //
1318 // Stack:
1319 // caller fp |
1320 // function slot | entry frame
1321 // context slot |
1322 // bad fp (0xff...f) |
1323 // callee saved registers + ra
1324 // 4 args slots
1325 // args
1326
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001327 // If this is the outermost JS call, set js_entry_sp value.
1328 Label non_outermost_js;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001329 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001330 __ li(t1, Operand(ExternalReference(js_entry_sp)));
1331 __ lw(t2, MemOperand(t1));
1332 __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg));
1333 __ sw(fp, MemOperand(t1));
1334 __ li(t0, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
1335 Label cont;
1336 __ b(&cont);
1337 __ nop(); // Branch delay slot nop.
1338 __ bind(&non_outermost_js);
1339 __ li(t0, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
1340 __ bind(&cont);
1341 __ push(t0);
Ben Murdoch257744e2011-11-30 15:57:28 +00001342
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001343 // Jump to a faked try block that does the invoke, with a faked catch
1344 // block that sets the pending exception.
1345 __ jmp(&invoke);
1346 __ bind(&handler_entry);
1347 handler_offset_ = handler_entry.pos();
1348 // Caught exception: Store result (exception) in the pending exception
1349 // field in the JSEnv and return a failure sentinel. Coming in here the
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001350 // fp will be invalid because the PushStackHandler below sets it to 0 to
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001351 // signal the existence of the JSEntry frame.
Ben Murdoch589d6972011-11-30 16:04:58 +00001352 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001353 isolate)));
Ben Murdoch257744e2011-11-30 15:57:28 +00001354 __ sw(v0, MemOperand(t0)); // We come back from 'invoke'. result is in v0.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001355 __ LoadRoot(v0, Heap::kExceptionRootIndex);
Ben Murdoch257744e2011-11-30 15:57:28 +00001356 __ b(&exit); // b exposes branch delay slot.
1357 __ nop(); // Branch delay slot nop.
1358
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001359 // Invoke: Link this frame into the handler chain.
Ben Murdoch257744e2011-11-30 15:57:28 +00001360 __ bind(&invoke);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001361 __ PushStackHandler();
Ben Murdoch257744e2011-11-30 15:57:28 +00001362 // If an exception not caught by another handler occurs, this handler
1363 // returns control to the code after the jmp(&invoke) above, which
1364 // restores all kCalleeSaved registers (including cp and fp) to their
1365 // saved values before returning a failure to C.
1366
1367 // Clear any pending exceptions.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001368 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
Ben Murdoch589d6972011-11-30 16:04:58 +00001369 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001370 isolate)));
Ben Murdoch257744e2011-11-30 15:57:28 +00001371 __ sw(t1, MemOperand(t0));
1372
1373 // Invoke the function by calling through JS entry trampoline builtin.
1374 // Notice that we cannot store a reference to the trampoline code directly in
1375 // this stub, because runtime stubs are not traversed when doing GC.
1376
1377 // Registers:
1378 // a0: entry_address
1379 // a1: function
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001380 // a2: receiver_pointer
Ben Murdoch257744e2011-11-30 15:57:28 +00001381 // a3: argc
1382 // s0: argv
1383 //
1384 // Stack:
1385 // handler frame
1386 // entry frame
1387 // callee saved registers + ra
1388 // 4 args slots
1389 // args
1390
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001391 if (type() == StackFrame::ENTRY_CONSTRUCT) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001392 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001393 isolate);
Ben Murdoch257744e2011-11-30 15:57:28 +00001394 __ li(t0, Operand(construct_entry));
1395 } else {
1396 ExternalReference entry(Builtins::kJSEntryTrampoline, masm->isolate());
1397 __ li(t0, Operand(entry));
1398 }
1399 __ lw(t9, MemOperand(t0)); // Deref address.
1400
1401 // Call JSEntryTrampoline.
1402 __ addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag);
1403 __ Call(t9);
1404
1405 // Unlink this frame from the handler chain.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001406 __ PopStackHandler();
Ben Murdoch257744e2011-11-30 15:57:28 +00001407
1408 __ bind(&exit); // v0 holds result
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001409 // Check if the current stack frame is marked as the outermost JS frame.
1410 Label non_outermost_js_2;
1411 __ pop(t1);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001412 __ Branch(&non_outermost_js_2,
1413 ne,
1414 t1,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001415 Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
1416 __ li(t1, Operand(ExternalReference(js_entry_sp)));
1417 __ sw(zero_reg, MemOperand(t1));
1418 __ bind(&non_outermost_js_2);
Ben Murdoch257744e2011-11-30 15:57:28 +00001419
1420 // Restore the top frame descriptors from the stack.
1421 __ pop(t1);
Ben Murdoch589d6972011-11-30 16:04:58 +00001422 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001423 isolate)));
Ben Murdoch257744e2011-11-30 15:57:28 +00001424 __ sw(t1, MemOperand(t0));
1425
1426 // Reset the stack to the callee saved registers.
1427 __ addiu(sp, sp, -EntryFrameConstants::kCallerFPOffset);
1428
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001429 // Restore callee-saved fpu registers.
1430 __ MultiPopFPU(kCalleeSavedFPU);
Ben Murdoch589d6972011-11-30 16:04:58 +00001431
Ben Murdoch257744e2011-11-30 15:57:28 +00001432 // Restore callee saved registers from the stack.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001433 __ MultiPop(kCalleeSaved | ra.bit());
Ben Murdoch257744e2011-11-30 15:57:28 +00001434 // Return.
1435 __ Jump(ra);
Steve Block44f0eee2011-05-26 01:26:41 +01001436}
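// Illustrative note (not part of the original stub): js_entry_sp tracks only
// the outermost JSEntry frame, so the marker handling above is roughly
//   if (js_entry_sp == 0) { js_entry_sp = fp; push(OUTERMOST_JSENTRY_FRAME); }
//   else                  { push(INNER_JSENTRY_FRAME); }
//   ... invoke JS ...
//   if (pop() == OUTERMOST_JSENTRY_FRAME) js_entry_sp = 0;
// which is what the non_outermost_js / non_outermost_js_2 label pairs do.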
1437
1438
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001439void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
1440 // Return address is in ra.
1441 Label miss;
1442
1443 Register receiver = LoadDescriptor::ReceiverRegister();
1444 Register index = LoadDescriptor::NameRegister();
1445 Register scratch = t1;
1446 Register result = v0;
1447 DCHECK(!scratch.is(receiver) && !scratch.is(index));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001448 DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001449
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001450 StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
1451 &miss, // When not a string.
1452 &miss, // When not a number.
1453 &miss, // When index out of range.
1454 STRING_INDEX_IS_ARRAY_INDEX,
1455 RECEIVER_IS_STRING);
1456 char_at_generator.GenerateFast(masm);
1457 __ Ret();
1458
1459 StubRuntimeCallHelper call_helper;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001460 char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001461
1462 __ bind(&miss);
1463 PropertyAccessCompiler::TailCallBuiltin(
1464 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
1465}
1466
1467
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001468void InstanceOfStub::Generate(MacroAssembler* masm) {
1469 Register const object = a1; // Object (lhs).
1470 Register const function = a0; // Function (rhs).
1471 Register const object_map = a2; // Map of {object}.
1472 Register const function_map = a3; // Map of {function}.
1473 Register const function_prototype = t0; // Prototype of {function}.
1474 Register const scratch = t1;
Ben Murdoch257744e2011-11-30 15:57:28 +00001475
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001476 DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
1477 DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
Ben Murdoch257744e2011-11-30 15:57:28 +00001478
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001479 // Check if {object} is a smi.
1480 Label object_is_smi;
1481 __ JumpIfSmi(object, &object_is_smi);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001482
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001483 // Lookup the {function} and the {object} map in the global instanceof cache.
1484 // Note: This is safe because we clear the global instanceof cache whenever
1485 // we change the prototype of any object.
1486 Label fast_case, slow_case;
1487 __ lw(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
1488 __ LoadRoot(at, Heap::kInstanceofCacheFunctionRootIndex);
1489 __ Branch(&fast_case, ne, function, Operand(at));
1490 __ LoadRoot(at, Heap::kInstanceofCacheMapRootIndex);
1491 __ Branch(&fast_case, ne, object_map, Operand(at));
1492 __ Ret(USE_DELAY_SLOT);
1493 __ LoadRoot(v0, Heap::kInstanceofCacheAnswerRootIndex); // In delay slot.
Ben Murdoch257744e2011-11-30 15:57:28 +00001494
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001495 // If {object} is a smi we can safely return false if {function} is a JS
1496 // function, otherwise we have to miss to the runtime and throw an exception.
1497 __ bind(&object_is_smi);
1498 __ JumpIfSmi(function, &slow_case);
1499 __ GetObjectType(function, function_map, scratch);
1500 __ Branch(&slow_case, ne, scratch, Operand(JS_FUNCTION_TYPE));
1501 __ Ret(USE_DELAY_SLOT);
1502 __ LoadRoot(v0, Heap::kFalseValueRootIndex); // In delay slot.
Ben Murdoch257744e2011-11-30 15:57:28 +00001503
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001504 // Fast-case: The {function} must be a valid JSFunction.
1505 __ bind(&fast_case);
1506 __ JumpIfSmi(function, &slow_case);
1507 __ GetObjectType(function, function_map, scratch);
1508 __ Branch(&slow_case, ne, scratch, Operand(JS_FUNCTION_TYPE));
Ben Murdoch257744e2011-11-30 15:57:28 +00001509
Ben Murdochda12d292016-06-02 14:46:10 +01001510 // Go to the runtime if the function is not a constructor.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001511 __ lbu(scratch, FieldMemOperand(function_map, Map::kBitFieldOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01001512 __ And(at, scratch, Operand(1 << Map::kIsConstructor));
1513 __ Branch(&slow_case, eq, at, Operand(zero_reg));
1514
1515 // Ensure that {function} has an instance prototype.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001516 __ And(at, scratch, Operand(1 << Map::kHasNonInstancePrototype));
1517 __ Branch(&slow_case, ne, at, Operand(zero_reg));
Ben Murdoch257744e2011-11-30 15:57:28 +00001518
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001519 // Get the "prototype" (or initial map) of the {function}.
1520 __ lw(function_prototype,
1521 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1522 __ AssertNotSmi(function_prototype);
Ben Murdoch257744e2011-11-30 15:57:28 +00001523
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001524 // Resolve the prototype if the {function} has an initial map. Afterwards the
1525 // {function_prototype} will be either the JSReceiver prototype object or the
1526 // hole value, which means that no instances of the {function} were created so
1527 // far and hence we should return false.
1528 Label function_prototype_valid;
1529 __ GetObjectType(function_prototype, scratch, scratch);
1530 __ Branch(&function_prototype_valid, ne, scratch, Operand(MAP_TYPE));
1531 __ lw(function_prototype,
1532 FieldMemOperand(function_prototype, Map::kPrototypeOffset));
1533 __ bind(&function_prototype_valid);
1534 __ AssertNotSmi(function_prototype);
Ben Murdoch257744e2011-11-30 15:57:28 +00001535
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001536 // Update the global instanceof cache with the current {object} map and
1537 // {function}. The cached answer will be set when it is known below.
1538 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
1539 __ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
Ben Murdoch257744e2011-11-30 15:57:28 +00001540
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001541 // Loop through the prototype chain looking for the {function} prototype.
1542 // Assume true, and change to false if not found.
1543 Register const object_instance_type = function_map;
1544 Register const map_bit_field = function_map;
1545 Register const null = scratch;
1546 Register const result = v0;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001547
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001548 Label done, loop, fast_runtime_fallback;
1549 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1550 __ LoadRoot(null, Heap::kNullValueRootIndex);
Ben Murdoch257744e2011-11-30 15:57:28 +00001551 __ bind(&loop);
Ben Murdoch257744e2011-11-30 15:57:28 +00001552
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001553 // Check if the object needs to be access checked.
1554 __ lbu(map_bit_field, FieldMemOperand(object_map, Map::kBitFieldOffset));
1555 __ And(map_bit_field, map_bit_field, Operand(1 << Map::kIsAccessCheckNeeded));
1556 __ Branch(&fast_runtime_fallback, ne, map_bit_field, Operand(zero_reg));
1557 // Check if the current object is a Proxy.
1558 __ lbu(object_instance_type,
1559 FieldMemOperand(object_map, Map::kInstanceTypeOffset));
1560 __ Branch(&fast_runtime_fallback, eq, object_instance_type,
1561 Operand(JS_PROXY_TYPE));
1562
1563 __ lw(object, FieldMemOperand(object_map, Map::kPrototypeOffset));
1564 __ Branch(&done, eq, object, Operand(function_prototype));
1565 __ Branch(USE_DELAY_SLOT, &loop, ne, object, Operand(null));
1566 __ lw(object_map,
1567 FieldMemOperand(object, HeapObject::kMapOffset)); // In delay slot.
1568 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1569 __ bind(&done);
1570 __ Ret(USE_DELAY_SLOT);
1571 __ StoreRoot(result,
1572 Heap::kInstanceofCacheAnswerRootIndex); // In delay slot.
1573
1574 // Found Proxy or access check needed: Call the runtime
1575 __ bind(&fast_runtime_fallback);
1576 __ Push(object, function_prototype);
1577 // Invalidate the instanceof cache.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001578 DCHECK(Smi::FromInt(0) == 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001579 __ StoreRoot(zero_reg, Heap::kInstanceofCacheFunctionRootIndex);
1580 __ TailCallRuntime(Runtime::kHasInPrototypeChain);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001581
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001582 // Slow-case: Call the %InstanceOf runtime function.
1583 __ bind(&slow_case);
1584 __ Push(object, function);
Ben Murdochda12d292016-06-02 14:46:10 +01001585 __ TailCallRuntime(is_es6_instanceof() ? Runtime::kOrdinaryHasInstance
1586 : Runtime::kInstanceOf);
Steve Block44f0eee2011-05-26 01:26:41 +01001587}
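// Illustrative sketch (not part of the original stub): the fast path above is
// roughly the following prototype-chain walk, falling back to the runtime for
// proxies, access-checked objects and non-constructor callables:
//   prototype = function.prototype;            // via the initial map if any
//   for (;;) {
//     if (needs_access_check(object) || is_proxy(object)) return runtime();
//     object = object.map.prototype;
//     if (object == prototype) { result = true; break; }
//     if (object == null)      { result = false; break; }
//   }
// The result is also memoized in the instanceof cache roots, keyed by the
// (function, object map) pair checked at the top of the stub.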
1588
1589
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001590void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
1591 Label miss;
1592 Register receiver = LoadDescriptor::ReceiverRegister();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001593 // Ensure that the vector and slot registers won't be clobbered before
1594 // calling the miss handler.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001595 DCHECK(!AreAliased(t0, t1, LoadWithVectorDescriptor::VectorRegister(),
1596 LoadWithVectorDescriptor::SlotRegister()));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001597
1598 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, t0,
1599 t1, &miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001600 __ bind(&miss);
1601 PropertyAccessCompiler::TailCallBuiltin(
1602 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
1603}
Ben Murdoch257744e2011-11-30 15:57:28 +00001604
Steve Block44f0eee2011-05-26 01:26:41 +01001605void RegExpExecStub::Generate(MacroAssembler* masm) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001606 // Just jump directly to the runtime if native RegExp is not selected at
1607 // compile time, or if the regexp entry in generated code is turned off,
1608 // either by a runtime switch or at compilation.
1609#ifdef V8_INTERPRETED_REGEXP
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001610 __ TailCallRuntime(Runtime::kRegExpExec);
Ben Murdoch257744e2011-11-30 15:57:28 +00001611#else // V8_INTERPRETED_REGEXP
Ben Murdoch257744e2011-11-30 15:57:28 +00001612
1613 // Stack frame on entry.
1614 // sp[0]: last_match_info (expected JSArray)
1615 // sp[4]: previous index
1616 // sp[8]: subject string
1617 // sp[12]: JSRegExp object
1618
Ben Murdochdb1b4382012-04-26 19:03:50 +01001619 const int kLastMatchInfoOffset = 0 * kPointerSize;
1620 const int kPreviousIndexOffset = 1 * kPointerSize;
1621 const int kSubjectOffset = 2 * kPointerSize;
1622 const int kJSRegExpOffset = 3 * kPointerSize;
Ben Murdoch257744e2011-11-30 15:57:28 +00001623
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001624 Label runtime;
Ben Murdoch257744e2011-11-30 15:57:28 +00001625 // Allocation of registers for this function. These are in callee save
1626 // registers and will be preserved by the call to the native RegExp code, as
1627 // this code is called using the normal C calling convention. When calling
1628 // directly from generated code the native RegExp code will not do a GC and
1629 // therefore the content of these registers is safe to use after the call.
1630 // MIPS - using s0..s2, since we are not using CEntry Stub.
1631 Register subject = s0;
1632 Register regexp_data = s1;
1633 Register last_match_info_elements = s2;
1634
1635 // Ensure that a RegExp stack is allocated.
1636 ExternalReference address_of_regexp_stack_memory_address =
Ben Murdoch097c5b22016-05-18 11:27:45 +01001637 ExternalReference::address_of_regexp_stack_memory_address(isolate());
Ben Murdoch257744e2011-11-30 15:57:28 +00001638 ExternalReference address_of_regexp_stack_memory_size =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001639 ExternalReference::address_of_regexp_stack_memory_size(isolate());
Ben Murdoch257744e2011-11-30 15:57:28 +00001640 __ li(a0, Operand(address_of_regexp_stack_memory_size));
1641 __ lw(a0, MemOperand(a0, 0));
1642 __ Branch(&runtime, eq, a0, Operand(zero_reg));
1643
1644 // Check that the first argument is a JSRegExp object.
1645 __ lw(a0, MemOperand(sp, kJSRegExpOffset));
1646 STATIC_ASSERT(kSmiTag == 0);
1647 __ JumpIfSmi(a0, &runtime);
1648 __ GetObjectType(a0, a1, a1);
1649 __ Branch(&runtime, ne, a1, Operand(JS_REGEXP_TYPE));
1650
1651 // Check that the RegExp has been compiled (data contains a fixed array).
1652 __ lw(regexp_data, FieldMemOperand(a0, JSRegExp::kDataOffset));
1653 if (FLAG_debug_code) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001654 __ SmiTst(regexp_data, t0);
Ben Murdoch257744e2011-11-30 15:57:28 +00001655 __ Check(nz,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001656 kUnexpectedTypeForRegExpDataFixedArrayExpected,
Ben Murdoch257744e2011-11-30 15:57:28 +00001657 t0,
1658 Operand(zero_reg));
1659 __ GetObjectType(regexp_data, a0, a0);
1660 __ Check(eq,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001661 kUnexpectedTypeForRegExpDataFixedArrayExpected,
Ben Murdoch257744e2011-11-30 15:57:28 +00001662 a0,
1663 Operand(FIXED_ARRAY_TYPE));
1664 }
1665
1666 // regexp_data: RegExp data (FixedArray)
1667 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
1668 __ lw(a0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
1669 __ Branch(&runtime, ne, a0, Operand(Smi::FromInt(JSRegExp::IRREGEXP)));
1670
1671 // regexp_data: RegExp data (FixedArray)
1672 // Check that the number of captures fit in the static offsets vector buffer.
1673 __ lw(a2,
1674 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001675 // Check (number_of_captures + 1) * 2 <= offsets vector size
1676 // Or number_of_captures * 2 <= offsets vector size - 2
1677 // Multiplying by 2 comes for free since a2 is smi-tagged.
Ben Murdoch257744e2011-11-30 15:57:28 +00001678 STATIC_ASSERT(kSmiTag == 0);
1679 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001680 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
1681 __ Branch(
1682 &runtime, hi, a2, Operand(Isolate::kJSRegexpStaticOffsetsVectorSize - 2));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001683
1684 // Reset offset for possibly sliced string.
1685 __ mov(t0, zero_reg);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001686 __ lw(subject, MemOperand(sp, kSubjectOffset));
1687 __ JumpIfSmi(subject, &runtime);
1688 __ mov(a3, subject); // Make a copy of the original subject string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001689 // subject: subject string
1690 // a3: subject string
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001691 // regexp_data: RegExp data (FixedArray)
1692 // Handle subject string according to its encoding and representation:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001693 // (1) Sequential string? If yes, go to (4).
1694 // (2) Sequential or cons? If not, go to (5).
1695 // (3) Cons string. If the string is flat, replace subject with first string
1696 // and go to (1). Otherwise bail out to runtime.
1697 // (4) Sequential string. Load regexp code according to encoding.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001698 // (E) Carry on.
1699 /// [...]
1700
1701 // Deferred code at the end of the stub:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001702 // (5) Long external string? If not, go to (7).
1703 // (6) External string. Make it, offset-wise, look like a sequential string.
1704 // Go to (4).
1705 // (7) Short external string or not a string? If yes, bail out to runtime.
1706 // (8) Sliced string. Replace subject with parent. Go to (1).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001707
Ben Murdoch097c5b22016-05-18 11:27:45 +01001708 Label seq_string /* 4 */, external_string /* 6 */, check_underlying /* 1 */,
1709 not_seq_nor_cons /* 5 */, not_long_external /* 7 */;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001710
Ben Murdoch097c5b22016-05-18 11:27:45 +01001711 __ bind(&check_underlying);
1712 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
1713 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
1714
1715 // (1) Sequential string? If yes, go to (4).
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001716 __ And(a1,
1717 a0,
1718 Operand(kIsNotStringMask |
1719 kStringRepresentationMask |
1720 kShortExternalStringMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00001721 STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001722 __ Branch(&seq_string, eq, a1, Operand(zero_reg)); // Go to (4).
Ben Murdoch257744e2011-11-30 15:57:28 +00001723
Ben Murdoch097c5b22016-05-18 11:27:45 +01001724 // (2) Sequential or cons? If not, go to (5).
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001725 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
1726 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001727 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
1728 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001729 // Go to (5).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001730 __ Branch(&not_seq_nor_cons, ge, a1, Operand(kExternalStringTag));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001731
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001732 // (3) Cons string. Check that it's flat.
1733 // Replace subject with first string and reload instance type.
Ben Murdoch257744e2011-11-30 15:57:28 +00001734 __ lw(a0, FieldMemOperand(subject, ConsString::kSecondOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001735 __ LoadRoot(a1, Heap::kempty_stringRootIndex);
Ben Murdoch257744e2011-11-30 15:57:28 +00001736 __ Branch(&runtime, ne, a0, Operand(a1));
1737 __ lw(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001738 __ jmp(&check_underlying);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001739
Ben Murdoch097c5b22016-05-18 11:27:45 +01001740 // (4) Sequential string. Load regexp code according to encoding.
Ben Murdoch257744e2011-11-30 15:57:28 +00001741 __ bind(&seq_string);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001742 // subject: sequential subject string (or look-alike, external string)
1743 // a3: original subject string
1744 // Load previous index and check range before a3 is overwritten. We have to
1745 // use a3 instead of subject here because subject might have been only made
1746 // to look like a sequential string when it actually is an external string.
1747 __ lw(a1, MemOperand(sp, kPreviousIndexOffset));
1748 __ JumpIfNotSmi(a1, &runtime);
1749 __ lw(a3, FieldMemOperand(a3, String::kLengthOffset));
1750 __ Branch(&runtime, ls, a3, Operand(a1));
1751 __ sra(a1, a1, kSmiTagSize); // Untag the Smi.
1752
Ben Murdoch257744e2011-11-30 15:57:28 +00001753 STATIC_ASSERT(kStringEncodingMask == 4);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001754 STATIC_ASSERT(kOneByteStringTag == 4);
Ben Murdoch257744e2011-11-30 15:57:28 +00001755 STATIC_ASSERT(kTwoByteStringTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001756 __ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for one-byte.
1757 __ lw(t9, FieldMemOperand(regexp_data, JSRegExp::kDataOneByteCodeOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001758 __ sra(a3, a0, 2); // a3 is 1 for one-byte, 0 for UC16 (used below).
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001759 __ lw(t1, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001760 __ Movz(t9, t1, a0); // If UC16 (a0 is 0), replace t9 w/kDataUC16CodeOffset.
Ben Murdoch257744e2011-11-30 15:57:28 +00001761
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001762 // (E) Carry on. String handling is done.
1763 // t9: irregexp code
Ben Murdoch257744e2011-11-30 15:57:28 +00001764 // Check that the irregexp code has been generated for the actual string
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001765 // encoding. If it has, the field contains a code object; otherwise it contains
1766 // a smi (code flushing support).
1767 __ JumpIfSmi(t9, &runtime);
Ben Murdoch257744e2011-11-30 15:57:28 +00001768
Ben Murdoch257744e2011-11-30 15:57:28 +00001769 // a1: previous index
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001770 // a3: encoding of subject string (1 if one_byte, 0 if two_byte);
Ben Murdoch257744e2011-11-30 15:57:28 +00001771 // t9: code
1772 // subject: Subject string
1773 // regexp_data: RegExp data (FixedArray)
1774 // All checks done. Now push arguments for native regexp code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001775 __ IncrementCounter(isolate()->counters()->regexp_entry_native(),
Ben Murdoch257744e2011-11-30 15:57:28 +00001776 1, a0, a2);
1777
1778 // Isolates: note we add an additional parameter here (isolate pointer).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001779 const int kRegExpExecuteArguments = 9;
Ben Murdochdb1b4382012-04-26 19:03:50 +01001780 const int kParameterRegisters = 4;
Ben Murdoch257744e2011-11-30 15:57:28 +00001781 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
1782
1783 // Stack pointer now points to cell where return address is to be written.
1784 // Arguments are before that on the stack or in registers, meaning we
1785 // treat the return address as argument 5. Thus every argument after that
1786 // needs to be shifted back by 1. Since DirectCEntryStub will handle
1787 // allocating space for the c argument slots, we don't need to calculate
1788 // that into the argument positions on the stack. This is how the stack will
1789 // look (sp meaning the value of sp at this moment):
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001790 // [sp + 5] - Argument 9
Ben Murdoch257744e2011-11-30 15:57:28 +00001791 // [sp + 4] - Argument 8
1792 // [sp + 3] - Argument 7
1793 // [sp + 2] - Argument 6
1794 // [sp + 1] - Argument 5
1795 // [sp + 0] - saved ra
1796
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001797 // Argument 9: Pass current isolate address.
Ben Murdoch257744e2011-11-30 15:57:28 +00001798 // CFunctionArgumentOperand handles MIPS stack argument slots.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001799 __ li(a0, Operand(ExternalReference::isolate_address(isolate())));
1800 __ sw(a0, MemOperand(sp, 5 * kPointerSize));
1801
1802 // Argument 8: Indicate that this is a direct call from JavaScript.
1803 __ li(a0, Operand(1));
Ben Murdoch257744e2011-11-30 15:57:28 +00001804 __ sw(a0, MemOperand(sp, 4 * kPointerSize));
1805
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001806 // Argument 7: Start (high end) of backtracking stack memory area.
Ben Murdoch257744e2011-11-30 15:57:28 +00001807 __ li(a0, Operand(address_of_regexp_stack_memory_address));
1808 __ lw(a0, MemOperand(a0, 0));
1809 __ li(a2, Operand(address_of_regexp_stack_memory_size));
1810 __ lw(a2, MemOperand(a2, 0));
1811 __ addu(a0, a0, a2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001812 __ sw(a0, MemOperand(sp, 3 * kPointerSize));
1813
1814 // Argument 6: Set the number of capture registers to zero to force global
1815 // regexps to behave as non-global. This does not affect non-global regexps.
1816 __ mov(a0, zero_reg);
Ben Murdoch257744e2011-11-30 15:57:28 +00001817 __ sw(a0, MemOperand(sp, 2 * kPointerSize));
1818
1819 // Argument 5: static offsets vector buffer.
1820 __ li(a0, Operand(
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001821 ExternalReference::address_of_static_offsets_vector(isolate())));
Ben Murdoch257744e2011-11-30 15:57:28 +00001822 __ sw(a0, MemOperand(sp, 1 * kPointerSize));
1823
1824 // For arguments 4 and 3 get string length, calculate start of string data
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001825 // and calculate the shift of the index (0 for one-byte and 1 for two-byte).
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001826 __ Addu(t2, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag));
Ben Murdoch257744e2011-11-30 15:57:28 +00001827 __ Xor(a3, a3, Operand(1)); // 1 for 2-byte str, 0 for 1-byte.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001828 // Load the length from the original subject string from the previous stack
1829 // frame. Therefore we have to use fp, which points exactly to two pointer
1830 // sizes below the previous sp. (Because creating a new stack frame pushes
1831 // the previous fp onto the stack and moves up sp by 2 * kPointerSize.)
Ben Murdoch589d6972011-11-30 16:04:58 +00001832 __ lw(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001833 // If slice offset is not 0, load the length from the original sliced string.
1834 // Argument 4, a3: End of string data
1835 // Argument 3, a2: Start of string data
1836 // Prepare start and end index of the input.
1837 __ sllv(t1, t0, a3);
1838 __ addu(t0, t2, t1);
Ben Murdoch257744e2011-11-30 15:57:28 +00001839 __ sllv(t1, a1, a3);
1840 __ addu(a2, t0, t1);
Ben Murdoch257744e2011-11-30 15:57:28 +00001841
Ben Murdoch589d6972011-11-30 16:04:58 +00001842 __ lw(t2, FieldMemOperand(subject, String::kLengthOffset));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001843 __ sra(t2, t2, kSmiTagSize);
1844 __ sllv(t1, t2, a3);
1845 __ addu(a3, t0, t1);
Ben Murdoch257744e2011-11-30 15:57:28 +00001846 // Argument 2 (a1): Previous index.
1847 // Already there
1848
1849 // Argument 1 (a0): Subject string.
Ben Murdoch589d6972011-11-30 16:04:58 +00001850 __ mov(a0, subject);
Ben Murdoch257744e2011-11-30 15:57:28 +00001851
1852 // Locate the code entry and call it.
1853 __ Addu(t9, t9, Operand(Code::kHeaderSize - kHeapObjectTag));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001854 DirectCEntryStub stub(isolate());
Ben Murdoch257744e2011-11-30 15:57:28 +00001855 stub.GenerateCall(masm, t9);
1856
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001857 __ LeaveExitFrame(false, no_reg, true);
Ben Murdoch257744e2011-11-30 15:57:28 +00001858
1859 // v0: result
1860 // subject: subject string (callee saved)
1861 // regexp_data: RegExp data (callee saved)
1862 // last_match_info_elements: Last match info elements (callee saved)
Ben Murdoch257744e2011-11-30 15:57:28 +00001863 // Check the result.
Ben Murdoch257744e2011-11-30 15:57:28 +00001864 Label success;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001865 __ Branch(&success, eq, v0, Operand(1));
1866 // We expect exactly one result since we force the called regexp to behave
1867 // as non-global.
Ben Murdoch257744e2011-11-30 15:57:28 +00001868 Label failure;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001869 __ Branch(&failure, eq, v0, Operand(NativeRegExpMacroAssembler::FAILURE));
Ben Murdoch257744e2011-11-30 15:57:28 +00001870 // If not exception it can only be retry. Handle that in the runtime system.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001871 __ Branch(&runtime, ne, v0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
Ben Murdoch257744e2011-11-30 15:57:28 +00001872 // Result must now be exception. If there is no pending exception already, a
1873 // stack overflow (on the backtrack stack) was detected in RegExp code but
1874 // the exception has not been created yet. Handle that in the runtime system.
1875 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001876 __ li(a1, Operand(isolate()->factory()->the_hole_value()));
Ben Murdoch589d6972011-11-30 16:04:58 +00001877 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001878 isolate())));
Ben Murdoch257744e2011-11-30 15:57:28 +00001879 __ lw(v0, MemOperand(a2, 0));
Ben Murdoch589d6972011-11-30 16:04:58 +00001880 __ Branch(&runtime, eq, v0, Operand(a1));
Ben Murdoch257744e2011-11-30 15:57:28 +00001881
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001882 // For exception, throw the exception again.
1883 __ TailCallRuntime(Runtime::kRegExpExecReThrow);
Ben Murdoch257744e2011-11-30 15:57:28 +00001884
1885 __ bind(&failure);
1886 // For failure and exception return null.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001887 __ li(v0, Operand(isolate()->factory()->null_value()));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001888 __ DropAndRet(4);
Ben Murdoch257744e2011-11-30 15:57:28 +00001889
1890 // Process the result from the native regexp code.
1891 __ bind(&success);
1892 __ lw(a1,
1893 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
1894 // Calculate number of capture registers (number_of_captures + 1) * 2.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001895 // Multiplying by 2 comes for free since a1 is smi-tagged.
Ben Murdoch257744e2011-11-30 15:57:28 +00001896 STATIC_ASSERT(kSmiTag == 0);
1897 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
1898 __ Addu(a1, a1, Operand(2)); // a1 was a smi.
1899
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001900 __ lw(a0, MemOperand(sp, kLastMatchInfoOffset));
1901 __ JumpIfSmi(a0, &runtime);
1902 __ GetObjectType(a0, a2, a2);
1903 __ Branch(&runtime, ne, a2, Operand(JS_ARRAY_TYPE));
1904 // Check that the JSArray is in fast case.
1905 __ lw(last_match_info_elements,
1906 FieldMemOperand(a0, JSArray::kElementsOffset));
1907 __ lw(a0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset));
1908 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
1909 __ Branch(&runtime, ne, a0, Operand(at));
1910 // Check that the last match info has space for the capture registers and the
1911 // additional information.
1912 __ lw(a0,
1913 FieldMemOperand(last_match_info_elements, FixedArray::kLengthOffset));
1914 __ Addu(a2, a1, Operand(RegExpImpl::kLastMatchOverhead));
1915 __ sra(at, a0, kSmiTagSize);
1916 __ Branch(&runtime, gt, a2, Operand(at));
1917
Ben Murdoch257744e2011-11-30 15:57:28 +00001918 // a1: number of capture registers
1919 // subject: subject string
1920 // Store the capture count.
1921 __ sll(a2, a1, kSmiTagSize + kSmiShiftSize); // To smi.
1922 __ sw(a2, FieldMemOperand(last_match_info_elements,
1923 RegExpImpl::kLastCaptureCountOffset));
1924 // Store last subject and last input.
Ben Murdoch257744e2011-11-30 15:57:28 +00001925 __ sw(subject,
1926 FieldMemOperand(last_match_info_elements,
1927 RegExpImpl::kLastSubjectOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001928 __ mov(a2, subject);
1929 __ RecordWriteField(last_match_info_elements,
1930 RegExpImpl::kLastSubjectOffset,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001931 subject,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001932 t3,
1933 kRAHasNotBeenSaved,
1934 kDontSaveFPRegs);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001935 __ mov(subject, a2);
Ben Murdoch257744e2011-11-30 15:57:28 +00001936 __ sw(subject,
1937 FieldMemOperand(last_match_info_elements,
1938 RegExpImpl::kLastInputOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001939 __ RecordWriteField(last_match_info_elements,
1940 RegExpImpl::kLastInputOffset,
1941 subject,
1942 t3,
1943 kRAHasNotBeenSaved,
1944 kDontSaveFPRegs);
Ben Murdoch257744e2011-11-30 15:57:28 +00001945
1946 // Get the static offsets vector filled by the native regexp code.
1947 ExternalReference address_of_static_offsets_vector =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001948 ExternalReference::address_of_static_offsets_vector(isolate());
Ben Murdoch257744e2011-11-30 15:57:28 +00001949 __ li(a2, Operand(address_of_static_offsets_vector));
1950
1951 // a1: number of capture registers
1952 // a2: offsets vector
1953 Label next_capture, done;
1954 // Capture register counter starts from number of capture registers and
1955 // counts down until wrapping after zero.
1956 __ Addu(a0,
1957 last_match_info_elements,
1958 Operand(RegExpImpl::kFirstCaptureOffset - kHeapObjectTag));
1959 __ bind(&next_capture);
1960 __ Subu(a1, a1, Operand(1));
1961 __ Branch(&done, lt, a1, Operand(zero_reg));
1962 // Read the value from the static offsets vector buffer.
1963 __ lw(a3, MemOperand(a2, 0));
1964 __ addiu(a2, a2, kPointerSize);
1965 // Store the smi value in the last match info.
1966 __ sll(a3, a3, kSmiTagSize); // Convert to Smi.
1967 __ sw(a3, MemOperand(a0, 0));
1968 __ Branch(&next_capture, USE_DELAY_SLOT);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001969 __ addiu(a0, a0, kPointerSize); // In branch delay slot.
Ben Murdoch257744e2011-11-30 15:57:28 +00001970
1971 __ bind(&done);
1972
1973 // Return last match info.
1974 __ lw(v0, MemOperand(sp, kLastMatchInfoOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001975 __ DropAndRet(4);
1976
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001977 // Do the runtime call to execute the regexp.
1978 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001979 __ TailCallRuntime(Runtime::kRegExpExec);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001980
1981 // Deferred code for string handling.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001982 // (5) Long external string? If not, go to (7).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001983 __ bind(&not_seq_nor_cons);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001984 // Go to (7).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001985 __ Branch(&not_long_external, gt, a1, Operand(kExternalStringTag));
1986
Ben Murdoch097c5b22016-05-18 11:27:45 +01001987 // (6) External string. Make it, offset-wise, look like a sequential string.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001988 __ bind(&external_string);
1989 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
1990 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
1991 if (FLAG_debug_code) {
1992 // Assert that we do not have a cons or slice (indirect strings) here.
1993 // Sequential strings have already been ruled out.
1994 __ And(at, a0, Operand(kIsIndirectStringMask));
1995 __ Assert(eq,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001996 kExternalStringExpectedButNotFound,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001997 at,
1998 Operand(zero_reg));
1999 }
2000 __ lw(subject,
2001 FieldMemOperand(subject, ExternalString::kResourceDataOffset));
2002 // Move the pointer so that offset-wise, it looks like a sequential string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002003 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002004 __ Subu(subject,
2005 subject,
2006 SeqTwoByteString::kHeaderSize - kHeapObjectTag);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002007 __ jmp(&seq_string); // Go to (4).
Ben Murdoch592a9fc2012-03-05 11:04:45 +00002008
Ben Murdoch097c5b22016-05-18 11:27:45 +01002009 // (7) Short external string or not a string? If yes, bail out to runtime.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002010 __ bind(&not_long_external);
2011 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
2012 __ And(at, a1, Operand(kIsNotStringMask | kShortExternalStringMask));
2013 __ Branch(&runtime, ne, at, Operand(zero_reg));
2014
Ben Murdoch097c5b22016-05-18 11:27:45 +01002015 // (8) Sliced string. Replace subject with parent. Go to (1).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002016 // Load offset into t0 and replace subject string with parent.
2017 __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset));
2018 __ sra(t0, t0, kSmiTagSize);
2019 __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset));
2020 __ jmp(&check_underlying); // Go to (1).
Ben Murdoch257744e2011-11-30 15:57:28 +00002021#endif // V8_INTERPRETED_REGEXP
Steve Block44f0eee2011-05-26 01:26:41 +01002022}
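// Illustrative sketch (not part of the original stub): following the argument
// comments above, the native regexp code invoked via DirectCEntryStub takes
// nine parameters roughly shaped like
//   int Match(String* subject,              // a0
//             int previous_index,           // a1
//             byte* input_start,            // a2
//             byte* input_end,              // a3
//             int* static_offsets_vector,   // stack argument 5
//             int num_capture_registers,    // stack argument 6, 0 here
//             byte* backtrack_stack_base,   // stack argument 7 (high end)
//             int direct_call,              // stack argument 8, 1 here
//             Isolate* isolate);            // stack argument 9
// Parameter names are illustrative; the ordering mirrors the stores above.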
2023
2024
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002025static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
2026 // a0 : number of arguments to the construct function
2027 // a2 : feedback vector
2028 // a3 : slot in feedback vector (Smi)
2029 // a1 : the function to call
2030 FrameScope scope(masm, StackFrame::INTERNAL);
2031 const RegList kSavedRegs = 1 << 4 | // a0
2032 1 << 5 | // a1
2033 1 << 6 | // a2
2034 1 << 7; // a3
2035
2036 // Number-of-arguments register must be smi-tagged to call out.
2037 __ SmiTag(a0);
2038 __ MultiPush(kSavedRegs);
2039
2040 __ CallStub(stub);
2041
2042 __ MultiPop(kSavedRegs);
2043 __ SmiUntag(a0);
2044}
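// Illustrative note (not part of the original stub): on this 32-bit target a
// smi keeps the integer in the upper 31 bits, so the SmiTag/SmiUntag pair
// above is simply
//   tagged = value << 1;   // kSmiTagSize == 1, kSmiTag == 0
//   value  = tagged >> 1;
// Tagging argc before MultiPush keeps every pushed word a valid tagged value,
// so the GC can walk the frame safely if the called stub allocates.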
2045
2046
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002047static void GenerateRecordCallTarget(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002048 // Cache the called function in a feedback vector slot. Cache states
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002049 // are uninitialized, monomorphic (indicated by a JSFunction), and
2050 // megamorphic.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002051 // a0 : number of arguments to the construct function
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002052 // a1 : the function to call
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002053 // a2 : feedback vector
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002054 // a3 : slot in feedback vector (Smi)
2055 Label initialize, done, miss, megamorphic, not_array_function;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002056
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002057 DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
2058 masm->isolate()->heap()->megamorphic_symbol());
2059 DCHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(masm->isolate()),
2060 masm->isolate()->heap()->uninitialized_symbol());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002061
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002062 // Load the cache state into t2.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002063 __ Lsa(t2, a2, a3, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002064 __ lw(t2, FieldMemOperand(t2, FixedArray::kHeaderSize));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002065
2066 // A monomorphic cache hit or an already megamorphic state: invoke the
2067 // function without changing the state.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002068 // We don't know if t2 is a WeakCell or a Symbol, but it's harmless to read at
2069 // this position in a symbol (see static asserts in type-feedback-vector.h).
2070 Label check_allocation_site;
2071 Register feedback_map = t1;
2072 Register weak_value = t4;
2073 __ lw(weak_value, FieldMemOperand(t2, WeakCell::kValueOffset));
2074 __ Branch(&done, eq, a1, Operand(weak_value));
2075 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
2076 __ Branch(&done, eq, t2, Operand(at));
2077 __ lw(feedback_map, FieldMemOperand(t2, HeapObject::kMapOffset));
2078 __ LoadRoot(at, Heap::kWeakCellMapRootIndex);
2079 __ Branch(&check_allocation_site, ne, feedback_map, Operand(at));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002080
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002081 // If the weak cell is cleared, we have a new chance to become monomorphic.
2082 __ JumpIfSmi(weak_value, &initialize);
2083 __ jmp(&megamorphic);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002084
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002085 __ bind(&check_allocation_site);
2086 // If we came here, we need to see if we are the array function.
2087 // If we didn't have a matching function, and we didn't find the megamorphic
2088 // sentinel, then we have in the slot either some other function or an
2089 // AllocationSite.
2090 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
2091 __ Branch(&miss, ne, feedback_map, Operand(at));
2092
2093 // Make sure the function is the Array() function
2094 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, t2);
2095 __ Branch(&megamorphic, ne, a1, Operand(t2));
2096 __ jmp(&done);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002097
2098 __ bind(&miss);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002099
2100 // A monomorphic miss (i.e., here the cache is not uninitialized) goes
2101 // megamorphic.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002102 __ LoadRoot(at, Heap::kuninitialized_symbolRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002103 __ Branch(&initialize, eq, t2, Operand(at));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002104 // MegamorphicSentinel is an immortal immovable object (undefined) so no
2105 // write-barrier is needed.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002106 __ bind(&megamorphic);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002107 __ Lsa(t2, a2, a3, kPointerSizeLog2 - kSmiTagSize);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002108 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002109 __ sw(at, FieldMemOperand(t2, FixedArray::kHeaderSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002110 __ jmp(&done);
2111
2112 // An uninitialized cache is patched with the function.
2113 __ bind(&initialize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002114 // Make sure the function is the Array() function.
2115 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, t2);
2116 __ Branch(&not_array_function, ne, a1, Operand(t2));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002117
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002118 // The target function is the Array constructor,
2119 // Create an AllocationSite if we don't already have it, store it in the
2120 // slot.
2121 CreateAllocationSiteStub create_stub(masm->isolate());
2122 CallStubInRecordCallTarget(masm, &create_stub);
2123 __ Branch(&done);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002124
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002125 __ bind(&not_array_function);
2126 CreateWeakCellStub weak_cell_stub(masm->isolate());
2127 CallStubInRecordCallTarget(masm, &weak_cell_stub);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002128 __ bind(&done);
2129}
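// Illustrative sketch (not part of the original stub): the feedback slot
// written above moves through a small state machine, roughly
//   uninitialized symbol
//     -> WeakCell(function) or AllocationSite   // first call; Array() gets a site
//     -> megamorphic symbol                     // a different target was seen
// A cleared WeakCell is treated like the uninitialized state (branch back to
// &initialize); once a slot is megamorphic it stays megamorphic.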
2130
2131
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002132void CallConstructStub::Generate(MacroAssembler* masm) {
2133 // a0 : number of arguments
2134 // a1 : the function to call
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002135 // a2 : feedback vector
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002136 // a3 : slot in feedback vector (Smi, for RecordCallTarget)
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002137
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002138 Label non_function;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002139 // Check that the function is not a smi.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002140 __ JumpIfSmi(a1, &non_function);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002141 // Check that the function is a JSFunction.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002142 __ GetObjectType(a1, t1, t1);
2143 __ Branch(&non_function, ne, t1, Operand(JS_FUNCTION_TYPE));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002144
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002145 GenerateRecordCallTarget(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002146
Ben Murdoch097c5b22016-05-18 11:27:45 +01002147 __ Lsa(t1, a2, a3, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002148 Label feedback_register_initialized;
2149 // Put the AllocationSite from the feedback vector into a2, or undefined.
2150 __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize));
2151 __ lw(t1, FieldMemOperand(a2, AllocationSite::kMapOffset));
2152 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
2153 __ Branch(&feedback_register_initialized, eq, t1, Operand(at));
2154 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2155 __ bind(&feedback_register_initialized);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002156
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002157 __ AssertUndefinedOrAllocationSite(a2, t1);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002158
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002159 // Pass function as new target.
2160 __ mov(a3, a1);
2161
2162 // Tail call to the function-specific construct stub (still in the caller
2163 // context at this point).
2164 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2165 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
2166 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002167 __ Jump(at);
2168
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002169 __ bind(&non_function);
2170 __ mov(a3, a1);
2171 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002172}
2173
2174
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002175void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002176 // a1 - function
2177 // a3 - slot id
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002178 // a2 - vector
2179 // t0 - loaded from vector[slot]
2180 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, at);
2181 __ Branch(miss, ne, a1, Operand(at));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002182
2183 __ li(a0, Operand(arg_count()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002184
2185 // Increment the call count for monomorphic function calls.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002186 __ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002187 __ lw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
2188 __ Addu(a3, a3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
2189 __ sw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002190
2191 __ mov(a2, t0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002192 __ mov(a3, a1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002193 ArrayConstructorStub stub(masm->isolate(), arg_count());
2194 __ TailCallStub(&stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002195}
2196
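// The `Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize)` pattern used above
// and below turns a Smi-tagged slot index (a3) into the address of that
// slot's entry in the feedback vector (a2). A sketch of the arithmetic,
// assuming 32-bit pointers, 1-bit Smi tags and an 8-byte FixedArray header;
// the helper name and constants below are illustrative only.
static intptr_t FeedbackSlotAddress(intptr_t tagged_vector,
                                    intptr_t tagged_slot) {
  const int kAssumedSmiTagSize = 1;       // low bit is the Smi tag
  const int kAssumedPointerSizeLog2 = 2;  // 4-byte pointers on 32-bit MIPS
  const int kAssumedHeaderSize = 8;       // FixedArray map + length words
  const int kAssumedHeapObjectTag = 1;    // the vector is a tagged pointer
  intptr_t untagged_index = tagged_slot >> kAssumedSmiTagSize;
  return tagged_vector - kAssumedHeapObjectTag + kAssumedHeaderSize +
         (untagged_index << kAssumedPointerSizeLog2);
}
// The per-slot call count that the stubs increment lives one word further
// on, at FeedbackSlotAddress(...) + kPointerSize, stored as a Smi.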
2197
2198void CallICStub::Generate(MacroAssembler* masm) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002199 // a1 - function
2200 // a3 - slot id (Smi)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002201 // a2 - vector
2202 Label extra_checks_or_miss, call, call_function;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002203 int argc = arg_count();
2204 ParameterCount actual(argc);
2205
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002206 // The checks. First, does a1 match the recorded monomorphic target?
Ben Murdoch097c5b22016-05-18 11:27:45 +01002207 __ Lsa(t0, a2, a3, kPointerSizeLog2 - kSmiTagSize);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002208 __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002209
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002210 // We don't know that we have a weak cell. We might have a private symbol
2211 // or an AllocationSite, but the memory is safe to examine.
2212 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
2213 // FixedArray.
2214 // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
2215 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
2216 // computed, meaning that it can't appear to be a pointer. If the low bit is
2217 // 0, then hash is computed, but the 0 bit prevents the field from appearing
2218 // to be a pointer.
2219 STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
2220 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
2221 WeakCell::kValueOffset &&
2222 WeakCell::kValueOffset == Symbol::kHashFieldSlot);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002223
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002224 __ lw(t1, FieldMemOperand(t0, WeakCell::kValueOffset));
2225 __ Branch(&extra_checks_or_miss, ne, a1, Operand(t1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002226
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002227 // The compare above could have been a SMI/SMI comparison. Guard against this
2228 // convincing us that we have a monomorphic JSFunction.
2229 __ JumpIfSmi(a1, &extra_checks_or_miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002230
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002231 // Increment the call count for monomorphic function calls.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002232 __ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002233 __ lw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
2234 __ Addu(a3, a3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
2235 __ sw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002236
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002237 __ bind(&call_function);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002238 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
2239 tail_call_mode()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002240 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
2241 USE_DELAY_SLOT);
2242 __ li(a0, Operand(argc)); // In delay slot.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002243
2244 __ bind(&extra_checks_or_miss);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002245 Label uninitialized, miss, not_allocation_site;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002246
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002247 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002248 __ Branch(&call, eq, t0, Operand(at));
2249
2250 // Verify that t0 contains an AllocationSite
2251 __ lw(t1, FieldMemOperand(t0, HeapObject::kMapOffset));
2252 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
2253 __ Branch(&not_allocation_site, ne, t1, Operand(at));
2254
2255 HandleArrayCase(masm, &miss);
2256
2257 __ bind(&not_allocation_site);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002258
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002259 // The following cases attempt to handle MISS cases without going to the
2260 // runtime.
2261 if (FLAG_trace_ic) {
2262 __ Branch(&miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002263 }
2264
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002265 __ LoadRoot(at, Heap::kuninitialized_symbolRootIndex);
2266 __ Branch(&uninitialized, eq, t0, Operand(at));
2267
2268 // We are going megamorphic. If the feedback is a JSFunction, it is fine
2269 // to handle it here. More complex cases are dealt with in the runtime.
2270 __ AssertNotSmi(t0);
2271 __ GetObjectType(t0, t1, t1);
2272 __ Branch(&miss, ne, t1, Operand(JS_FUNCTION_TYPE));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002273 __ Lsa(t0, a2, a3, kPointerSizeLog2 - kSmiTagSize);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002274 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
2275 __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002276
2277 __ bind(&call);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002278 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002279 RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
2280 USE_DELAY_SLOT);
2281 __ li(a0, Operand(argc)); // In delay slot.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002282
2283 __ bind(&uninitialized);
2284
2285 // We are going monomorphic, provided we actually have a JSFunction.
2286 __ JumpIfSmi(a1, &miss);
2287
2288 // Go to the miss case if we do not have a function.
2289 __ GetObjectType(a1, t0, t0);
2290 __ Branch(&miss, ne, t0, Operand(JS_FUNCTION_TYPE));
2291
2292 // Make sure the function is not the Array() function, which requires special
2293 // behavior on MISS.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002294 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, t0);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002295 __ Branch(&miss, eq, a1, Operand(t0));
2296
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002297 // Make sure the function belongs to the same native context.
2298 __ lw(t0, FieldMemOperand(a1, JSFunction::kContextOffset));
2299 __ lw(t0, ContextMemOperand(t0, Context::NATIVE_CONTEXT_INDEX));
2300 __ lw(t1, NativeContextMemOperand());
2301 __ Branch(&miss, ne, t0, Operand(t1));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002302
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002303 // Initialize the call counter.
Ben Murdoch097c5b22016-05-18 11:27:45 +01002304 __ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002305 __ li(t0, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
2306 __ sw(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002307
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002308 // Store the function. Use a stub since we need a frame for allocation.
2309 // a2 - vector
2310 // a3 - slot
2311 // a1 - function
2312 {
2313 FrameScope scope(masm, StackFrame::INTERNAL);
2314 CreateWeakCellStub create_stub(masm->isolate());
2315 __ Push(a1);
2316 __ CallStub(&create_stub);
2317 __ Pop(a1);
2318 }
2319
2320 __ Branch(&call_function);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002321
2322 // We are here because tracing is on or we encountered a MISS case we can't
2323 // handle here.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002324 __ bind(&miss);
2325 GenerateMiss(masm);
2326
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002327 __ Branch(&call);
Steve Block44f0eee2011-05-26 01:26:41 +01002328}
2329
2330
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002331void CallICStub::GenerateMiss(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002332 FrameScope scope(masm, StackFrame::INTERNAL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002333
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002334 // Push the receiver and the function and feedback info.
2335 __ Push(a1, a2, a3);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002336
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002337 // Call the entry.
2338 __ CallRuntime(Runtime::kCallIC_Miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002339
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002340 // Move result to a1 and exit the internal frame.
2341 __ mov(a1, v0);
Steve Block44f0eee2011-05-26 01:26:41 +01002342}
2343
2344
Ben Murdoch257744e2011-11-30 15:57:28 +00002345// StringCharCodeAtGenerator.
Steve Block44f0eee2011-05-26 01:26:41 +01002346void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002347 DCHECK(!t0.is(index_));
2348 DCHECK(!t0.is(result_));
2349 DCHECK(!t0.is(object_));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002350 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
2351 // If the receiver is a smi trigger the non-string case.
2352 __ JumpIfSmi(object_, receiver_not_string_);
Ben Murdoch257744e2011-11-30 15:57:28 +00002353
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002354 // Fetch the instance type of the receiver into result register.
2355 __ lw(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
2356 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
2357 // If the receiver is not a string trigger the non-string case.
2358 __ And(t0, result_, Operand(kIsNotStringMask));
2359 __ Branch(receiver_not_string_, ne, t0, Operand(zero_reg));
2360 }
Ben Murdoch257744e2011-11-30 15:57:28 +00002361
2362 // If the index is non-smi trigger the non-smi case.
2363 __ JumpIfNotSmi(index_, &index_not_smi_);
2364
Ben Murdoch257744e2011-11-30 15:57:28 +00002365 __ bind(&got_smi_index_);
2366
2367 // Check for index out of range.
2368 __ lw(t0, FieldMemOperand(object_, String::kLengthOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002369 __ Branch(index_out_of_range_, ls, t0, Operand(index_));
Ben Murdoch257744e2011-11-30 15:57:28 +00002370
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002371 __ sra(index_, index_, kSmiTagSize);
Ben Murdoch257744e2011-11-30 15:57:28 +00002372
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002373 StringCharLoadGenerator::Generate(masm,
2374 object_,
2375 index_,
2376 result_,
2377 &call_runtime_);
Ben Murdoch257744e2011-11-30 15:57:28 +00002378
Ben Murdoch257744e2011-11-30 15:57:28 +00002379 __ sll(result_, result_, kSmiTagSize);
2380 __ bind(&exit_);
Steve Block44f0eee2011-05-26 01:26:41 +01002381}
2382
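// The bounds check in GenerateFast above compares the Smi-tagged length
// against the Smi-tagged index with an unsigned "lower or same" condition,
// so a single branch rejects both index >= length and negative indices
// (which look like huge unsigned values). A sketch of the same test on
// untagged values; the helper name is illustrative only.
static bool CharCodeAtIndexInRange(int32_t index, int32_t length) {
  // One unsigned comparison covers both failure cases.
  return static_cast<uint32_t>(index) < static_cast<uint32_t>(length);
}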
2383
2384void StringCharCodeAtGenerator::GenerateSlow(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002385 MacroAssembler* masm, EmbedMode embed_mode,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002386 const RuntimeCallHelper& call_helper) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002387 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
Ben Murdoch257744e2011-11-30 15:57:28 +00002388
2389 // Index is not a smi.
2390 __ bind(&index_not_smi_);
2391 // If index is a heap number, try converting it to an integer.
2392 __ CheckMap(index_,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002393 result_,
Ben Murdoch257744e2011-11-30 15:57:28 +00002394 Heap::kHeapNumberMapRootIndex,
2395 index_not_number_,
2396 DONT_DO_SMI_CHECK);
2397 call_helper.BeforeCall(masm);
2398 // Consumed by runtime conversion function:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002399 if (embed_mode == PART_OF_IC_HANDLER) {
2400 __ Push(LoadWithVectorDescriptor::VectorRegister(),
2401 LoadWithVectorDescriptor::SlotRegister(), object_, index_);
2402 } else {
2403 __ Push(object_, index_);
2404 }
Ben Murdoch257744e2011-11-30 15:57:28 +00002405 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002406 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
Ben Murdoch257744e2011-11-30 15:57:28 +00002407 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002408 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
Ben Murdoch257744e2011-11-30 15:57:28 +00002409 // NumberToSmi discards numbers that are not exact integers.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002410 __ CallRuntime(Runtime::kNumberToSmi);
Ben Murdoch257744e2011-11-30 15:57:28 +00002411 }
2412
2413 // Save the conversion result before the pop instructions below
2414 // have a chance to overwrite it.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002415 __ Move(index_, v0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002416 if (embed_mode == PART_OF_IC_HANDLER) {
2417 __ Pop(LoadWithVectorDescriptor::VectorRegister(),
2418 LoadWithVectorDescriptor::SlotRegister(), object_);
2419 } else {
2420 __ pop(object_);
2421 }
Ben Murdoch257744e2011-11-30 15:57:28 +00002422 // Reload the instance type.
2423 __ lw(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
2424 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
2425 call_helper.AfterCall(masm);
2426 // If index is still not a smi, it must be out of range.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002427 __ JumpIfNotSmi(index_, index_out_of_range_);
Ben Murdoch257744e2011-11-30 15:57:28 +00002428 // Otherwise, return to the fast path.
2429 __ Branch(&got_smi_index_);
2430
2431 // Call runtime. We get here when the receiver is a string and the
2432 // index is a number, but the code for getting the actual character
2433 // is too complex (e.g., when the string needs to be flattened).
2434 __ bind(&call_runtime_);
2435 call_helper.BeforeCall(masm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002436 __ sll(index_, index_, kSmiTagSize);
Ben Murdoch257744e2011-11-30 15:57:28 +00002437 __ Push(object_, index_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002438 __ CallRuntime(Runtime::kStringCharCodeAtRT);
Ben Murdoch257744e2011-11-30 15:57:28 +00002439
2440 __ Move(result_, v0);
2441
2442 call_helper.AfterCall(masm);
2443 __ jmp(&exit_);
2444
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002445 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
Steve Block44f0eee2011-05-26 01:26:41 +01002446}
2447
2448
2449// -------------------------------------------------------------------------
2450// StringCharFromCodeGenerator
2451
2452void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002453 // Fast case of Heap::LookupSingleCharacterStringFromCode.
2454
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002455 DCHECK(!t0.is(result_));
2456 DCHECK(!t0.is(code_));
Ben Murdoch257744e2011-11-30 15:57:28 +00002457
2458 STATIC_ASSERT(kSmiTag == 0);
2459 STATIC_ASSERT(kSmiShiftSize == 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002460 DCHECK(base::bits::IsPowerOfTwo32(String::kMaxOneByteCharCodeU + 1));
2461 __ And(t0, code_, Operand(kSmiTagMask |
2462 ((~String::kMaxOneByteCharCodeU) << kSmiTagSize)));
Ben Murdoch257744e2011-11-30 15:57:28 +00002463 __ Branch(&slow_case_, ne, t0, Operand(zero_reg));
2464
2465 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002466 // At this point code register contains smi tagged one-byte char code.
Ben Murdoch257744e2011-11-30 15:57:28 +00002467 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002468 __ Lsa(result_, result_, code_, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch257744e2011-11-30 15:57:28 +00002469 __ lw(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
2470 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
2471 __ Branch(&slow_case_, eq, result_, Operand(t0));
2472 __ bind(&exit_);
Steve Block44f0eee2011-05-26 01:26:41 +01002473}
2474
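// The single And/Branch in GenerateFast above folds two checks into one
// mask test: the low bit verifies that code_ is a Smi, and the remaining
// bits verify that the untagged value fits in a one-byte char code. A
// sketch of the check, assuming a 1-bit Smi tag and a maximum one-byte
// char code of 0xFF; the helper name and constants are illustrative only.
static bool IsSmiOneByteCharCode(uint32_t tagged_code) {
  const uint32_t kAssumedSmiTagMask = 0x1;
  const uint32_t kAssumedMaxOneByteCharCode = 0xFF;
  // Every bit outside the tagged range [0, 0x1FE] and the tag bit itself
  // must be clear, i.e. the value is a Smi in [0, 0xFF].
  uint32_t mask = kAssumedSmiTagMask | (~kAssumedMaxOneByteCharCode << 1);
  return (tagged_code & mask) == 0;
}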
2475
2476void StringCharFromCodeGenerator::GenerateSlow(
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002477 MacroAssembler* masm,
2478 const RuntimeCallHelper& call_helper) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002479 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
Ben Murdoch257744e2011-11-30 15:57:28 +00002480
2481 __ bind(&slow_case_);
2482 call_helper.BeforeCall(masm);
2483 __ push(code_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002484 __ CallRuntime(Runtime::kStringCharFromCode);
Ben Murdoch257744e2011-11-30 15:57:28 +00002485 __ Move(result_, v0);
2486
2487 call_helper.AfterCall(masm);
2488 __ Branch(&exit_);
2489
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002490 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
Steve Block44f0eee2011-05-26 01:26:41 +01002491}
2492
2493
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002494enum CopyCharactersFlags { COPY_ONE_BYTE = 1, DEST_ALWAYS_ALIGNED = 2 };
Steve Block44f0eee2011-05-26 01:26:41 +01002495
2496
Steve Block44f0eee2011-05-26 01:26:41 +01002497void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
2498 Register dest,
2499 Register src,
2500 Register count,
2501 Register scratch,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002502 String::Encoding encoding) {
2503 if (FLAG_debug_code) {
2504 // Check that destination is word aligned.
2505 __ And(scratch, dest, Operand(kPointerAlignmentMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00002506 __ Check(eq,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002507 kDestinationOfCopyNotAligned,
2508 scratch,
Ben Murdoch257744e2011-11-30 15:57:28 +00002509 Operand(zero_reg));
2510 }
2511
Ben Murdoch257744e2011-11-30 15:57:28 +00002512 // Assumes word reads and writes are little endian.
2513 // Nothing to do for zero characters.
2514 Label done;
2515
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002516 if (encoding == String::TWO_BYTE_ENCODING) {
2517 __ Addu(count, count, count);
Ben Murdoch257744e2011-11-30 15:57:28 +00002518 }
2519
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002520 Register limit = count; // Read until dest equals this.
2521 __ Addu(limit, dest, Operand(count));
Ben Murdoch257744e2011-11-30 15:57:28 +00002522
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002523 Label loop_entry, loop;
Ben Murdoch257744e2011-11-30 15:57:28 +00002524 // Copy bytes from src to dest until dest hits limit.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002525 __ Branch(&loop_entry);
2526 __ bind(&loop);
2527 __ lbu(scratch, MemOperand(src));
2528 __ Addu(src, src, Operand(1));
2529 __ sb(scratch, MemOperand(dest));
2530 __ Addu(dest, dest, Operand(1));
2531 __ bind(&loop_entry);
2532 __ Branch(&loop, lt, dest, Operand(limit));
Ben Murdoch257744e2011-11-30 15:57:28 +00002533
2534 __ bind(&done);
Steve Block44f0eee2011-05-26 01:26:41 +01002535}
2536
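// A minimal C++ sketch of the copy loop that GenerateCopyCharacters emits
// above: the generated code copies byte by byte until the destination
// cursor reaches its limit, after doubling the count when the characters
// are two-byte. The enum and helper below are illustrative, not V8 types.
enum class EncodingSketch { kOneByte, kTwoByte };

static void CopyCharactersSketch(uint8_t* dest, const uint8_t* src, int count,
                                 EncodingSketch encoding) {
  if (encoding == EncodingSketch::kTwoByte) {
    count += count;  // convert character count to byte count
  }
  uint8_t* limit = dest + count;  // read/write until dest reaches this
  while (dest < limit) {
    *dest++ = *src++;
  }
}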
2537
Steve Block44f0eee2011-05-26 01:26:41 +01002538void SubStringStub::Generate(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002539 Label runtime;
Ben Murdoch257744e2011-11-30 15:57:28 +00002540 // Stack frame on entry.
2541 // ra: return address
2542 // sp[0]: to
2543 // sp[4]: from
2544 // sp[8]: string
2545
2546 // This stub is called from the native-call %_SubString(...), so
2547 // nothing can be assumed about the arguments. It is tested that:
2548 // "string" is a sequential string,
2549 // both "from" and "to" are smis, and
2550 // 0 <= from <= to <= string.length.
2551 // If any of these assumptions fail, we call the runtime system.
2552
Ben Murdochdb1b4382012-04-26 19:03:50 +01002553 const int kToOffset = 0 * kPointerSize;
2554 const int kFromOffset = 1 * kPointerSize;
2555 const int kStringOffset = 2 * kPointerSize;
Ben Murdoch257744e2011-11-30 15:57:28 +00002556
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002557 __ lw(a2, MemOperand(sp, kToOffset));
2558 __ lw(a3, MemOperand(sp, kFromOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00002559 STATIC_ASSERT(kFromOffset == kToOffset + 4);
2560 STATIC_ASSERT(kSmiTag == 0);
2561 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
2562
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002563 // Utilize delay slots. SmiUntag doesn't emit a jump, everything else is
2564 // safe in this case.
2565 __ UntagAndJumpIfNotSmi(a2, a2, &runtime);
2566 __ UntagAndJumpIfNotSmi(a3, a3, &runtime);
2567 // Both a2 and a3 are untagged integers.
Ben Murdoch257744e2011-11-30 15:57:28 +00002568
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002569 __ Branch(&runtime, lt, a3, Operand(zero_reg)); // From < 0.
Ben Murdoch257744e2011-11-30 15:57:28 +00002570
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002571 __ Branch(&runtime, gt, a3, Operand(a2)); // Fail if from > to.
2572 __ Subu(a2, a2, a3);
Ben Murdoch257744e2011-11-30 15:57:28 +00002573
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002574 // Make sure first argument is a string.
Ben Murdoch589d6972011-11-30 16:04:58 +00002575 __ lw(v0, MemOperand(sp, kStringOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002576 __ JumpIfSmi(v0, &runtime);
Ben Murdoch589d6972011-11-30 16:04:58 +00002577 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00002578 __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002579 __ And(t0, a1, Operand(kIsNotStringMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00002580
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002581 __ Branch(&runtime, ne, t0, Operand(zero_reg));
Ben Murdoch257744e2011-11-30 15:57:28 +00002582
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002583 Label single_char;
2584 __ Branch(&single_char, eq, a2, Operand(1));
2585
Ben Murdoch589d6972011-11-30 16:04:58 +00002586 // Short-cut for the case of trivial substring.
2587 Label return_v0;
2588 // v0: original string
2589 // a2: result string length
2590 __ lw(t0, FieldMemOperand(v0, String::kLengthOffset));
2591 __ sra(t0, t0, 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002592 // Return original string.
Ben Murdoch589d6972011-11-30 16:04:58 +00002593 __ Branch(&return_v0, eq, a2, Operand(t0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002594 // Longer than original string's length or negative: unsafe arguments.
2595 __ Branch(&runtime, hi, a2, Operand(t0));
2596 // Shorter than original string's length: an actual substring.
Ben Murdoch257744e2011-11-30 15:57:28 +00002597
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002598 // Deal with different string types: update the index if necessary
2599 // and put the underlying string into t1.
2600 // v0: original string
2601 // a1: instance type
2602 // a2: length
2603 // a3: from index (untagged)
2604 Label underlying_unpacked, sliced_string, seq_or_external_string;
2605 // If the string is not indirect, it can only be sequential or external.
2606 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
2607 STATIC_ASSERT(kIsIndirectStringMask != 0);
2608 __ And(t0, a1, Operand(kIsIndirectStringMask));
2609 __ Branch(USE_DELAY_SLOT, &seq_or_external_string, eq, t0, Operand(zero_reg));
2610 // t0 is used as a scratch register and can be overwritten in either case.
2611 __ And(t0, a1, Operand(kSlicedNotConsMask));
2612 __ Branch(&sliced_string, ne, t0, Operand(zero_reg));
2613 // Cons string. Check whether it is flat, then fetch first part.
2614 __ lw(t1, FieldMemOperand(v0, ConsString::kSecondOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002615 __ LoadRoot(t0, Heap::kempty_stringRootIndex);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002616 __ Branch(&runtime, ne, t1, Operand(t0));
2617 __ lw(t1, FieldMemOperand(v0, ConsString::kFirstOffset));
2618 // Update instance type.
2619 __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset));
2620 __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
2621 __ jmp(&underlying_unpacked);
Ben Murdoch257744e2011-11-30 15:57:28 +00002622
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002623 __ bind(&sliced_string);
2624 // Sliced string. Fetch parent and correct start index by offset.
2625 __ lw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
2626 __ lw(t0, FieldMemOperand(v0, SlicedString::kOffsetOffset));
2627 __ sra(t0, t0, 1); // Add offset to index.
2628 __ Addu(a3, a3, t0);
2629 // Update instance type.
2630 __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset));
2631 __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
2632 __ jmp(&underlying_unpacked);
Ben Murdochc7cc0282012-03-05 14:35:55 +00002633
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002634 __ bind(&seq_or_external_string);
2635 // Sequential or external string. Just move string to the expected register.
2636 __ mov(t1, v0);
2637
2638 __ bind(&underlying_unpacked);
2639
2640 if (FLAG_string_slices) {
2641 Label copy_routine;
2642 // t1: underlying subject string
2643 // a1: instance type of underlying subject string
2644 // a2: length
2645 // a3: adjusted start index (untagged)
2646 // Short slice. Copy instead of slicing.
2647 __ Branch(&copy_routine, lt, a2, Operand(SlicedString::kMinLength));
2648 // Allocate new sliced string. At this point we do not reload the instance
2649 // type including the string encoding because we simply rely on the info
2650 // provided by the original string. It does not matter if the original
2651 // string's encoding is wrong because we always have to recheck encoding of
2652 // the newly created string's parent anyway due to externalized strings.
2653 Label two_byte_slice, set_slice_header;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002654 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002655 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
2656 __ And(t0, a1, Operand(kStringEncodingMask));
2657 __ Branch(&two_byte_slice, eq, t0, Operand(zero_reg));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002658 __ AllocateOneByteSlicedString(v0, a2, t2, t3, &runtime);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002659 __ jmp(&set_slice_header);
2660 __ bind(&two_byte_slice);
2661 __ AllocateTwoByteSlicedString(v0, a2, t2, t3, &runtime);
2662 __ bind(&set_slice_header);
2663 __ sll(a3, a3, 1);
2664 __ sw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
2665 __ sw(a3, FieldMemOperand(v0, SlicedString::kOffsetOffset));
2666 __ jmp(&return_v0);
2667
2668 __ bind(&copy_routine);
2669 }
2670
2671 // t1: underlying subject string
2672 // a1: instance type of underlying subject string
2673 // a2: length
2674 // a3: adjusted start index (untagged)
2675 Label two_byte_sequential, sequential_string, allocate_result;
2676 STATIC_ASSERT(kExternalStringTag != 0);
2677 STATIC_ASSERT(kSeqStringTag == 0);
2678 __ And(t0, a1, Operand(kExternalStringTag));
2679 __ Branch(&sequential_string, eq, t0, Operand(zero_reg));
2680
2681 // Handle external string.
2682 // Rule out short external strings.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002683 STATIC_ASSERT(kShortExternalStringTag != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002684 __ And(t0, a1, Operand(kShortExternalStringTag));
2685 __ Branch(&runtime, ne, t0, Operand(zero_reg));
2686 __ lw(t1, FieldMemOperand(t1, ExternalString::kResourceDataOffset));
2687 // t1 already points to the first character of underlying string.
2688 __ jmp(&allocate_result);
2689
2690 __ bind(&sequential_string);
2691 // Locate first character of underlying subject string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002692 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
2693 __ Addu(t1, t1, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002694
2695 __ bind(&allocate_result);
2696 // Sequential one-byte string. Allocate the result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002697 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002698 __ And(t0, a1, Operand(kStringEncodingMask));
2699 __ Branch(&two_byte_sequential, eq, t0, Operand(zero_reg));
2700
2701 // Allocate and copy the resulting one-byte string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002702 __ AllocateOneByteString(v0, a2, t0, t2, t3, &runtime);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002703
2704 // Locate first character of substring to copy.
2705 __ Addu(t1, t1, a3);
2706
Ben Murdoch257744e2011-11-30 15:57:28 +00002707 // Locate first character of result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002708 __ Addu(a1, v0, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch257744e2011-11-30 15:57:28 +00002709
Ben Murdoch589d6972011-11-30 16:04:58 +00002710 // v0: result string
2711 // a1: first character of result string
2712 // a2: result string length
2713 // t1: first character of substring to copy
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002714 STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
2715 StringHelper::GenerateCopyCharacters(
2716 masm, a1, t1, a2, a3, String::ONE_BYTE_ENCODING);
Ben Murdoch589d6972011-11-30 16:04:58 +00002717 __ jmp(&return_v0);
Ben Murdoch257744e2011-11-30 15:57:28 +00002718
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002719 // Allocate and copy the resulting two-byte string.
2720 __ bind(&two_byte_sequential);
2721 __ AllocateTwoByteString(v0, a2, t0, t2, t3, &runtime);
Ben Murdoch257744e2011-11-30 15:57:28 +00002722
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002723 // Locate first character of substring to copy.
Ben Murdoch589d6972011-11-30 16:04:58 +00002724 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002725 __ Lsa(t1, t1, a3, 1);
Ben Murdoch257744e2011-11-30 15:57:28 +00002726 // Locate first character of result.
2727 __ Addu(a1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch589d6972011-11-30 16:04:58 +00002728
Ben Murdoch257744e2011-11-30 15:57:28 +00002729 // v0: result string.
2730 // a1: first character of result.
2731 // a2: result length.
Ben Murdoch589d6972011-11-30 16:04:58 +00002732 // t1: first character of substring to copy.
Ben Murdoch257744e2011-11-30 15:57:28 +00002733 STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002734 StringHelper::GenerateCopyCharacters(
2735 masm, a1, t1, a2, a3, String::TWO_BYTE_ENCODING);
Ben Murdoch589d6972011-11-30 16:04:58 +00002736
2737 __ bind(&return_v0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002738 Counters* counters = isolate()->counters();
Ben Murdoch257744e2011-11-30 15:57:28 +00002739 __ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002740 __ DropAndRet(3);
Ben Murdoch257744e2011-11-30 15:57:28 +00002741
2742 // Just jump to runtime to create the sub string.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002743 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002744 __ TailCallRuntime(Runtime::kSubString);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002745
2746 __ bind(&single_char);
2747 // v0: original string
2748 // a1: instance type
2749 // a2: length
2750 // a3: from index (untagged)
2751 __ SmiTag(a3, a3);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002752 StringCharAtGenerator generator(v0, a3, a2, v0, &runtime, &runtime, &runtime,
2753 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002754 generator.GenerateFast(masm);
2755 __ DropAndRet(3);
2756 generator.SkipSlow(masm, &runtime);
Ben Murdoch257744e2011-11-30 15:57:28 +00002757}
2758
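// Whether SubStringStub above materializes the result as a SlicedString
// (a view into the unpacked parent string) or as a freshly copied
// sequential string depends only on the result length. A sketch of that
// decision; the threshold value is an assumption about
// SlicedString::kMinLength in this V8 version, and the helper name is
// illustrative only.
static bool ShouldAllocateSlicedString(int result_length,
                                       bool string_slices_enabled) {
  const int kAssumedMinSliceLength = 13;  // assumed SlicedString::kMinLength
  return string_slices_enabled && result_length >= kAssumedMinSliceLength;
}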
2759
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002760void ToNumberStub::Generate(MacroAssembler* masm) {
2761 // The ToNumber stub takes one argument in a0.
2762 Label not_smi;
2763 __ JumpIfNotSmi(a0, &not_smi);
2764 __ Ret(USE_DELAY_SLOT);
2765 __ mov(v0, a0);
2766 __ bind(&not_smi);
2767
2768 Label not_heap_number;
Ben Murdochda12d292016-06-02 14:46:10 +01002769 __ GetObjectType(a0, a1, a1);
2770 // a0: receiver
2771 // a1: receiver instance type
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002772 __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
2773 __ Ret(USE_DELAY_SLOT);
2774 __ mov(v0, a0);
2775 __ bind(&not_heap_number);
2776
Ben Murdochda12d292016-06-02 14:46:10 +01002777 NonNumberToNumberStub stub(masm->isolate());
2778 __ TailCallStub(&stub);
2779}
2780
2781void NonNumberToNumberStub::Generate(MacroAssembler* masm) {
2782 // The NonNumberToNumber stub takes one argument in a0.
2783 __ AssertNotNumber(a0);
2784
2785 Label not_string;
2786 __ GetObjectType(a0, a1, a1);
2787 // a0: receiver
2788 // a1: receiver instance type
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002789 __ Branch(&not_string, hs, a1, Operand(FIRST_NONSTRING_TYPE));
Ben Murdochda12d292016-06-02 14:46:10 +01002790 StringToNumberStub stub(masm->isolate());
2791 __ TailCallStub(&stub);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002792 __ bind(&not_string);
2793
2794 Label not_oddball;
2795 __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
2796 __ Ret(USE_DELAY_SLOT);
Ben Murdochda12d292016-06-02 14:46:10 +01002797 __ lw(v0, FieldMemOperand(a0, Oddball::kToNumberOffset)); // In delay slot.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002798 __ bind(&not_oddball);
2799
Ben Murdochda12d292016-06-02 14:46:10 +01002800 __ Push(a0); // Push argument.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002801 __ TailCallRuntime(Runtime::kToNumber);
2802}
2803
Ben Murdochda12d292016-06-02 14:46:10 +01002804void StringToNumberStub::Generate(MacroAssembler* masm) {
2805 // The StringToNumber stub takes one argument in a0.
2806 __ AssertString(a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002807
Ben Murdochda12d292016-06-02 14:46:10 +01002808 // Check if string has a cached array index.
2809 Label runtime;
2810 __ lw(a2, FieldMemOperand(a0, String::kHashFieldOffset));
2811 __ And(at, a2, Operand(String::kContainsCachedArrayIndexMask));
2812 __ Branch(&runtime, ne, at, Operand(zero_reg));
2813 __ IndexFromHash(a2, v0);
2814 __ Ret();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002815
Ben Murdochda12d292016-06-02 14:46:10 +01002816 __ bind(&runtime);
2817 __ Push(a0); // Push argument.
2818 __ TailCallRuntime(Runtime::kStringToNumber);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002819}
2820
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002821void ToStringStub::Generate(MacroAssembler* masm) {
2822 // The ToString stub takes one argument in a0.
2823 Label is_number;
2824 __ JumpIfSmi(a0, &is_number);
2825
2826 Label not_string;
2827 __ GetObjectType(a0, a1, a1);
2828 // a0: receiver
2829 // a1: receiver instance type
2830 __ Branch(&not_string, ge, a1, Operand(FIRST_NONSTRING_TYPE));
2831 __ Ret(USE_DELAY_SLOT);
2832 __ mov(v0, a0);
2833 __ bind(&not_string);
2834
2835 Label not_heap_number;
2836 __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
2837 __ bind(&is_number);
2838 NumberToStringStub stub(isolate());
2839 __ TailCallStub(&stub);
2840 __ bind(&not_heap_number);
2841
2842 Label not_oddball;
2843 __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
2844 __ Ret(USE_DELAY_SLOT);
2845 __ lw(v0, FieldMemOperand(a0, Oddball::kToStringOffset));
2846 __ bind(&not_oddball);
2847
2848 __ push(a0); // Push argument.
2849 __ TailCallRuntime(Runtime::kToString);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002850}
2851
2852
Ben Murdoch097c5b22016-05-18 11:27:45 +01002853void ToNameStub::Generate(MacroAssembler* masm) {
2854 // The ToName stub takes one argument in a0.
2855 Label is_number;
2856 __ JumpIfSmi(a0, &is_number);
2857
2858 Label not_name;
2859 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
2860 __ GetObjectType(a0, a1, a1);
2861 // a0: receiver
2862 // a1: receiver instance type
2863 __ Branch(&not_name, gt, a1, Operand(LAST_NAME_TYPE));
2864 __ Ret(USE_DELAY_SLOT);
2865 __ mov(v0, a0);
2866 __ bind(&not_name);
2867
2868 Label not_heap_number;
2869 __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
2870 __ bind(&is_number);
2871 NumberToStringStub stub(isolate());
2872 __ TailCallStub(&stub);
2873 __ bind(&not_heap_number);
2874
2875 Label not_oddball;
2876 __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
2877 __ Ret(USE_DELAY_SLOT);
2878 __ lw(v0, FieldMemOperand(a0, Oddball::kToStringOffset));
2879 __ bind(&not_oddball);
2880
2881 __ push(a0); // Push argument.
2882 __ TailCallRuntime(Runtime::kToName);
2883}
2884
2885
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002886void StringHelper::GenerateFlatOneByteStringEquals(
2887 MacroAssembler* masm, Register left, Register right, Register scratch1,
2888 Register scratch2, Register scratch3) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002889 Register length = scratch1;
2890
2891 // Compare lengths.
2892 Label strings_not_equal, check_zero_length;
2893 __ lw(length, FieldMemOperand(left, String::kLengthOffset));
2894 __ lw(scratch2, FieldMemOperand(right, String::kLengthOffset));
2895 __ Branch(&check_zero_length, eq, length, Operand(scratch2));
2896 __ bind(&strings_not_equal);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002897 DCHECK(is_int16(NOT_EQUAL));
2898 __ Ret(USE_DELAY_SLOT);
Ben Murdoch257744e2011-11-30 15:57:28 +00002899 __ li(v0, Operand(Smi::FromInt(NOT_EQUAL)));
Ben Murdoch257744e2011-11-30 15:57:28 +00002900
2901 // Check if the length is zero.
2902 Label compare_chars;
2903 __ bind(&check_zero_length);
2904 STATIC_ASSERT(kSmiTag == 0);
2905 __ Branch(&compare_chars, ne, length, Operand(zero_reg));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002906 DCHECK(is_int16(EQUAL));
2907 __ Ret(USE_DELAY_SLOT);
Ben Murdoch257744e2011-11-30 15:57:28 +00002908 __ li(v0, Operand(Smi::FromInt(EQUAL)));
Ben Murdoch257744e2011-11-30 15:57:28 +00002909
2910 // Compare characters.
2911 __ bind(&compare_chars);
2912
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002913 GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2, scratch3,
2914 v0, &strings_not_equal);
Ben Murdoch257744e2011-11-30 15:57:28 +00002915
2916 // Characters are equal.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002917 __ Ret(USE_DELAY_SLOT);
Ben Murdoch257744e2011-11-30 15:57:28 +00002918 __ li(v0, Operand(Smi::FromInt(EQUAL)));
Steve Block44f0eee2011-05-26 01:26:41 +01002919}
2920
2921
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002922void StringHelper::GenerateCompareFlatOneByteStrings(
2923 MacroAssembler* masm, Register left, Register right, Register scratch1,
2924 Register scratch2, Register scratch3, Register scratch4) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002925 Label result_not_equal, compare_lengths;
2926 // Find minimum length and length difference.
2927 __ lw(scratch1, FieldMemOperand(left, String::kLengthOffset));
2928 __ lw(scratch2, FieldMemOperand(right, String::kLengthOffset));
2929 __ Subu(scratch3, scratch1, Operand(scratch2));
2930 Register length_delta = scratch3;
2931 __ slt(scratch4, scratch2, scratch1);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002932 __ Movn(scratch1, scratch2, scratch4);
Ben Murdoch257744e2011-11-30 15:57:28 +00002933 Register min_length = scratch1;
2934 STATIC_ASSERT(kSmiTag == 0);
2935 __ Branch(&compare_lengths, eq, min_length, Operand(zero_reg));
2936
2937 // Compare loop.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002938 GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
2939 scratch4, v0, &result_not_equal);
Ben Murdoch257744e2011-11-30 15:57:28 +00002940
2941 // Compare lengths - strings up to min-length are equal.
2942 __ bind(&compare_lengths);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002943 DCHECK(Smi::FromInt(EQUAL) == static_cast<Smi*>(0));
Ben Murdoch257744e2011-11-30 15:57:28 +00002944 // Use length_delta as result if it's zero.
2945 __ mov(scratch2, length_delta);
2946 __ mov(scratch4, zero_reg);
2947 __ mov(v0, zero_reg);
2948
2949 __ bind(&result_not_equal);
2950 // Conditionally update the result based on either length_delta or
2951 // the last comparison performed in the loop above.
2952 Label ret;
2953 __ Branch(&ret, eq, scratch2, Operand(scratch4));
2954 __ li(v0, Operand(Smi::FromInt(GREATER)));
2955 __ Branch(&ret, gt, scratch2, Operand(scratch4));
2956 __ li(v0, Operand(Smi::FromInt(LESS)));
2957 __ bind(&ret);
2958 __ Ret();
2959}
2960
2961
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002962void StringHelper::GenerateOneByteCharsCompareLoop(
2963 MacroAssembler* masm, Register left, Register right, Register length,
2964 Register scratch1, Register scratch2, Register scratch3,
Ben Murdoch257744e2011-11-30 15:57:28 +00002965 Label* chars_not_equal) {
2966 // Change index to run from -length to -1 by adding length to string
2967 // start. This means that the loop ends when index reaches zero, which
2968 // doesn't need an additional compare.
2969 __ SmiUntag(length);
2970 __ Addu(scratch1, length,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002971 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch257744e2011-11-30 15:57:28 +00002972 __ Addu(left, left, Operand(scratch1));
2973 __ Addu(right, right, Operand(scratch1));
2974 __ Subu(length, zero_reg, length);
2975 Register index = length; // index = -length;
2976
2977
2978 // Compare loop.
2979 Label loop;
2980 __ bind(&loop);
2981 __ Addu(scratch3, left, index);
2982 __ lbu(scratch1, MemOperand(scratch3));
2983 __ Addu(scratch3, right, index);
2984 __ lbu(scratch2, MemOperand(scratch3));
2985 __ Branch(chars_not_equal, ne, scratch1, Operand(scratch2));
2986 __ Addu(index, index, 1);
2987 __ Branch(&loop, ne, index, Operand(zero_reg));
Steve Block44f0eee2011-05-26 01:26:41 +01002988}
2989
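// The comparison loop above uses the negative-index trick: both string
// cursors are advanced past the compared region and the index runs from
// -length up to 0, so the back-branch of the loop doubles as the bounds
// check. A C++ sketch of the same comparison; the helper name is
// illustrative only.
static bool OneByteCharsEqualSketch(const uint8_t* left, const uint8_t* right,
                                    int length) {
  left += length;  // point one past the compared region
  right += length;
  for (int index = -length; index != 0; index++) {
    if (left[index] != right[index]) return false;
  }
  return true;
}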
2990
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002991void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
2992 // ----------- S t a t e -------------
2993 // -- a1 : left
2994 // -- a0 : right
2995 // -- ra : return address
2996 // -----------------------------------
Ben Murdoch257744e2011-11-30 15:57:28 +00002997
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002998 // Load a2 with the allocation site. We stick an undefined dummy value here
2999 // and replace it with the real allocation site later when we instantiate this
3000 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
3001 __ li(a2, handle(isolate()->heap()->undefined_value()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003002
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003003 // Make sure that we actually patched the allocation site.
3004 if (FLAG_debug_code) {
3005 __ And(at, a2, Operand(kSmiTagMask));
3006 __ Assert(ne, kExpectedAllocationSite, at, Operand(zero_reg));
3007 __ lw(t0, FieldMemOperand(a2, HeapObject::kMapOffset));
3008 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
3009 __ Assert(eq, kExpectedAllocationSite, t0, Operand(at));
Ben Murdoch257744e2011-11-30 15:57:28 +00003010 }
3011
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003012 // Tail call into the stub that handles binary operations with allocation
3013 // sites.
3014 BinaryOpWithAllocationSiteStub stub(isolate(), state());
3015 __ TailCallStub(&stub);
Ben Murdoch257744e2011-11-30 15:57:28 +00003016}
3017
3018
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003019void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
3020 DCHECK_EQ(CompareICState::BOOLEAN, state());
3021 Label miss;
3022
3023 __ CheckMap(a1, a2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
3024 __ CheckMap(a0, a3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003025 if (!Token::IsEqualityOp(op())) {
3026 __ lw(a1, FieldMemOperand(a1, Oddball::kToNumberOffset));
3027 __ AssertSmi(a1);
3028 __ lw(a0, FieldMemOperand(a0, Oddball::kToNumberOffset));
3029 __ AssertSmi(a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003030 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003031 __ Ret(USE_DELAY_SLOT);
3032 __ Subu(v0, a1, a0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003033
3034 __ bind(&miss);
3035 GenerateMiss(masm);
3036}
3037
3038
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003039void CompareICStub::GenerateSmis(MacroAssembler* masm) {
3040 DCHECK(state() == CompareICState::SMI);
Ben Murdoch257744e2011-11-30 15:57:28 +00003041 Label miss;
3042 __ Or(a2, a1, a0);
3043 __ JumpIfNotSmi(a2, &miss);
3044
3045 if (GetCondition() == eq) {
3046 // For equality we do not care about the sign of the result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003047 __ Ret(USE_DELAY_SLOT);
Ben Murdoch257744e2011-11-30 15:57:28 +00003048 __ Subu(v0, a0, a1);
3049 } else {
3050 // Untag before subtracting to avoid handling overflow.
3051 __ SmiUntag(a1);
3052 __ SmiUntag(a0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003053 __ Ret(USE_DELAY_SLOT);
Ben Murdoch257744e2011-11-30 15:57:28 +00003054 __ Subu(v0, a1, a0);
3055 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003056
3057 __ bind(&miss);
3058 GenerateMiss(masm);
Steve Block44f0eee2011-05-26 01:26:41 +01003059}
3060
3061
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003062void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
3063 DCHECK(state() == CompareICState::NUMBER);
Ben Murdoch257744e2011-11-30 15:57:28 +00003064
3065 Label generic_stub;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003066 Label unordered, maybe_undefined1, maybe_undefined2;
Ben Murdoch257744e2011-11-30 15:57:28 +00003067 Label miss;
Ben Murdoch257744e2011-11-30 15:57:28 +00003068
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003069 if (left() == CompareICState::SMI) {
3070 __ JumpIfNotSmi(a1, &miss);
3071 }
3072 if (right() == CompareICState::SMI) {
3073 __ JumpIfNotSmi(a0, &miss);
Ben Murdoch85b71792012-04-11 18:30:58 +01003074 }
Ben Murdoch5d4cdbf2012-04-11 10:23:59 +01003075
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003076 // Inlining the double comparison and falling back to the general compare
3077 // stub if NaN is involved.
3078 // Load left and right operand.
3079 Label done, left, left_smi, right_smi;
3080 __ JumpIfSmi(a0, &right_smi);
3081 __ CheckMap(a0, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1,
3082 DONT_DO_SMI_CHECK);
3083 __ Subu(a2, a0, Operand(kHeapObjectTag));
3084 __ ldc1(f2, MemOperand(a2, HeapNumber::kValueOffset));
3085 __ Branch(&left);
3086 __ bind(&right_smi);
3087 __ SmiUntag(a2, a0); // Can't clobber a0 yet.
3088 FPURegister single_scratch = f6;
3089 __ mtc1(a2, single_scratch);
3090 __ cvt_d_w(f2, single_scratch);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003091
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003092 __ bind(&left);
3093 __ JumpIfSmi(a1, &left_smi);
3094 __ CheckMap(a1, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2,
3095 DONT_DO_SMI_CHECK);
3096 __ Subu(a2, a1, Operand(kHeapObjectTag));
3097 __ ldc1(f0, MemOperand(a2, HeapNumber::kValueOffset));
3098 __ Branch(&done);
3099 __ bind(&left_smi);
3100 __ SmiUntag(a2, a1); // Can't clobber a1 yet.
3101 single_scratch = f8;
3102 __ mtc1(a2, single_scratch);
3103 __ cvt_d_w(f0, single_scratch);
3104
3105 __ bind(&done);
3106
3107 // Return a result of -1, 0, or 1, or use CompareStub for NaNs.
3108 Label fpu_eq, fpu_lt;
3109 // Test if equal, and also handle the unordered/NaN case.
3110 __ BranchF(&fpu_eq, &unordered, eq, f0, f2);
3111
3112 // Test if less (unordered case is already handled).
3113 __ BranchF(&fpu_lt, NULL, lt, f0, f2);
3114
3115 // Otherwise it's greater, so just fall thru, and return.
3116 DCHECK(is_int16(GREATER) && is_int16(EQUAL) && is_int16(LESS));
3117 __ Ret(USE_DELAY_SLOT);
3118 __ li(v0, Operand(GREATER));
3119
3120 __ bind(&fpu_eq);
3121 __ Ret(USE_DELAY_SLOT);
3122 __ li(v0, Operand(EQUAL));
3123
3124 __ bind(&fpu_lt);
3125 __ Ret(USE_DELAY_SLOT);
3126 __ li(v0, Operand(LESS));
3127
3128 __ bind(&unordered);
Ben Murdoch257744e2011-11-30 15:57:28 +00003129 __ bind(&generic_stub);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003130 CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003131 CompareICState::GENERIC, CompareICState::GENERIC);
Ben Murdoch257744e2011-11-30 15:57:28 +00003132 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
3133
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003134 __ bind(&maybe_undefined1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003135 if (Token::IsOrderedRelationalCompareOp(op())) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003136 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
3137 __ Branch(&miss, ne, a0, Operand(at));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003138 __ JumpIfSmi(a1, &unordered);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003139 __ GetObjectType(a1, a2, a2);
3140 __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE));
3141 __ jmp(&unordered);
3142 }
3143
3144 __ bind(&maybe_undefined2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003145 if (Token::IsOrderedRelationalCompareOp(op())) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003146 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
3147 __ Branch(&unordered, eq, a1, Operand(at));
3148 }
3149
Ben Murdoch257744e2011-11-30 15:57:28 +00003150 __ bind(&miss);
3151 GenerateMiss(masm);
3152}
3153
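// The inlined number comparison above produces the usual -1/0/1 ordering
// and defers to the generic stub when either operand is NaN (the
// "unordered" case). An equivalent C++ sketch; the helper name is
// illustrative only.
static int CompareNumbersSketch(double left, double right, bool* unordered) {
  *unordered = (left != left) || (right != right);  // NaN compares unequal
  if (*unordered) return 0;      // caller falls back to the generic CompareIC
  if (left == right) return 0;   // EQUAL
  return left < right ? -1 : 1;  // LESS : GREATER
}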
3154
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003155void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
3156 DCHECK(state() == CompareICState::INTERNALIZED_STRING);
Ben Murdoch257744e2011-11-30 15:57:28 +00003157 Label miss;
3158
3159 // Registers containing left and right operands respectively.
3160 Register left = a1;
3161 Register right = a0;
3162 Register tmp1 = a2;
3163 Register tmp2 = a3;
3164
3165 // Check that both operands are heap objects.
3166 __ JumpIfEitherSmi(left, right, &miss);
3167
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003168 // Check that both operands are internalized strings.
Ben Murdoch257744e2011-11-30 15:57:28 +00003169 __ lw(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
3170 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3171 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
3172 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003173 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
3174 __ Or(tmp1, tmp1, Operand(tmp2));
3175 __ And(at, tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask));
3176 __ Branch(&miss, ne, at, Operand(zero_reg));
3177
Ben Murdoch257744e2011-11-30 15:57:28 +00003178 // Make sure a0 is non-zero. At this point input operands are
3179 // guaranteed to be non-zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003180 DCHECK(right.is(a0));
Ben Murdoch257744e2011-11-30 15:57:28 +00003181 STATIC_ASSERT(EQUAL == 0);
3182 STATIC_ASSERT(kSmiTag == 0);
3183 __ mov(v0, right);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003184 // Internalized strings are compared by identity.
Ben Murdoch257744e2011-11-30 15:57:28 +00003185 __ Ret(ne, left, Operand(right));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003186 DCHECK(is_int16(EQUAL));
3187 __ Ret(USE_DELAY_SLOT);
Ben Murdoch257744e2011-11-30 15:57:28 +00003188 __ li(v0, Operand(Smi::FromInt(EQUAL)));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003189
3190 __ bind(&miss);
3191 GenerateMiss(masm);
3192}
3193
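// For equality compares the stub above can answer without touching any
// characters: internalized strings are canonicalized, so identical
// pointers mean equal strings and, once both operands are known to be
// internalized, distinct pointers mean unequal strings. A sketch of that
// reasoning; the enum and helper are illustrative only.
enum class StringEqSketch { kEqual, kNotEqual, kNeedCharCompare };

static StringEqSketch FastStringEquality(const void* left, const void* right,
                                         bool both_internalized) {
  if (left == right) return StringEqSketch::kEqual;
  if (both_internalized) return StringEqSketch::kNotEqual;
  return StringEqSketch::kNeedCharCompare;
}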
3194
3195void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
3196 DCHECK(state() == CompareICState::UNIQUE_NAME);
3197 DCHECK(GetCondition() == eq);
3198 Label miss;
3199
3200 // Registers containing left and right operands respectively.
3201 Register left = a1;
3202 Register right = a0;
3203 Register tmp1 = a2;
3204 Register tmp2 = a3;
3205
3206 // Check that both operands are heap objects.
3207 __ JumpIfEitherSmi(left, right, &miss);
3208
3209 // Check that both operands are unique names. This leaves the instance
3210 // types loaded in tmp1 and tmp2.
3211 __ lw(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
3212 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3213 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
3214 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
3215
3216 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss);
3217 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss);
3218
3219 // Use a0 as result
3220 __ mov(v0, a0);
3221
3222 // Unique names are compared by identity.
3223 Label done;
3224 __ Branch(&done, ne, left, Operand(right));
3225 // Make sure a0 is non-zero. At this point input operands are
3226 // guaranteed to be non-zero.
3227 DCHECK(right.is(a0));
3228 STATIC_ASSERT(EQUAL == 0);
3229 STATIC_ASSERT(kSmiTag == 0);
3230 __ li(v0, Operand(Smi::FromInt(EQUAL)));
3231 __ bind(&done);
Ben Murdoch257744e2011-11-30 15:57:28 +00003232 __ Ret();
3233
3234 __ bind(&miss);
3235 GenerateMiss(masm);
3236}
3237
3238
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003239void CompareICStub::GenerateStrings(MacroAssembler* masm) {
3240 DCHECK(state() == CompareICState::STRING);
Ben Murdoch257744e2011-11-30 15:57:28 +00003241 Label miss;
3242
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003243 bool equality = Token::IsEqualityOp(op());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003244
Ben Murdoch257744e2011-11-30 15:57:28 +00003245 // Registers containing left and right operands respectively.
3246 Register left = a1;
3247 Register right = a0;
3248 Register tmp1 = a2;
3249 Register tmp2 = a3;
3250 Register tmp3 = t0;
3251 Register tmp4 = t1;
3252 Register tmp5 = t2;
3253
3254 // Check that both operands are heap objects.
3255 __ JumpIfEitherSmi(left, right, &miss);
3256
3257 // Check that both operands are strings. This leaves the instance
3258 // types loaded in tmp1 and tmp2.
3259 __ lw(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
3260 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3261 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
3262 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
3263 STATIC_ASSERT(kNotStringTag != 0);
3264 __ Or(tmp3, tmp1, tmp2);
3265 __ And(tmp5, tmp3, Operand(kIsNotStringMask));
3266 __ Branch(&miss, ne, tmp5, Operand(zero_reg));
3267
3268 // Fast check for identical strings.
3269 Label left_ne_right;
3270 STATIC_ASSERT(EQUAL == 0);
3271 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003272 __ Branch(&left_ne_right, ne, left, Operand(right));
3273 __ Ret(USE_DELAY_SLOT);
Ben Murdoch257744e2011-11-30 15:57:28 +00003274 __ mov(v0, zero_reg); // In the delay slot.
Ben Murdoch257744e2011-11-30 15:57:28 +00003275 __ bind(&left_ne_right);
3276
3277 // Handle not identical strings.
3278
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003279 // Check that both strings are internalized strings. If they are, we're done
3280 // because we already know they are not identical. We know they are both
3281 // strings.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003282 if (equality) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003283 DCHECK(GetCondition() == eq);
3284 STATIC_ASSERT(kInternalizedTag == 0);
3285 __ Or(tmp3, tmp1, Operand(tmp2));
3286 __ And(tmp5, tmp3, Operand(kIsNotInternalizedMask));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003287 Label is_symbol;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003288 __ Branch(&is_symbol, ne, tmp5, Operand(zero_reg));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003289 // Make sure a0 is non-zero. At this point input operands are
3290 // guaranteed to be non-zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003291 DCHECK(right.is(a0));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003292 __ Ret(USE_DELAY_SLOT);
3293 __ mov(v0, a0); // In the delay slot.
3294 __ bind(&is_symbol);
3295 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003296
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003297 // Check that both strings are sequential one-byte.
Ben Murdoch257744e2011-11-30 15:57:28 +00003298 Label runtime;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003299 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(tmp1, tmp2, tmp3, tmp4,
3300 &runtime);
Ben Murdoch257744e2011-11-30 15:57:28 +00003301
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003302 // Compare flat one-byte strings. Returns when done.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003303 if (equality) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003304 StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1, tmp2,
3305 tmp3);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003306 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003307 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
3308 tmp2, tmp3, tmp4);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003309 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003310
3311 // Handle more complex cases in runtime.
3312 __ bind(&runtime);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003313 if (equality) {
Ben Murdochda12d292016-06-02 14:46:10 +01003314 {
3315 FrameScope scope(masm, StackFrame::INTERNAL);
3316 __ Push(left, right);
3317 __ CallRuntime(Runtime::kStringEqual);
3318 }
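    // kStringEqual returned the true/false oddball in v0; subtracting the
    // true value below yields zero exactly when the strings were equal,
    // matching the "zero means equal" convention of this IC.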
3319 __ LoadRoot(a0, Heap::kTrueValueRootIndex);
3320 __ Ret(USE_DELAY_SLOT);
3321 __ Subu(v0, v0, a0); // In delay slot.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003322 } else {
Ben Murdochda12d292016-06-02 14:46:10 +01003323 __ Push(left, right);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003324 __ TailCallRuntime(Runtime::kStringCompare);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003325 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003326
3327 __ bind(&miss);
3328 GenerateMiss(masm);
Steve Block44f0eee2011-05-26 01:26:41 +01003329}
3330
3331
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003332void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
3333 DCHECK_EQ(CompareICState::RECEIVER, state());
Ben Murdoch257744e2011-11-30 15:57:28 +00003334 Label miss;
3335 __ And(a2, a1, Operand(a0));
3336 __ JumpIfSmi(a2, &miss);
3337
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003338 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
Ben Murdoch257744e2011-11-30 15:57:28 +00003339 __ GetObjectType(a0, a2, a2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003340 __ Branch(&miss, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE));
Ben Murdoch257744e2011-11-30 15:57:28 +00003341 __ GetObjectType(a1, a2, a2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003342 __ Branch(&miss, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE));
Ben Murdoch257744e2011-11-30 15:57:28 +00003343
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003344 DCHECK_EQ(eq, GetCondition());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003345 __ Ret(USE_DELAY_SLOT);
3346 __ subu(v0, a0, a1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003347
3348 __ bind(&miss);
3349 GenerateMiss(masm);
Steve Block44f0eee2011-05-26 01:26:41 +01003350}
3351
3352
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003353void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003354 Label miss;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003355 Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003356 __ And(a2, a1, a0);
3357 __ JumpIfSmi(a2, &miss);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003358 __ GetWeakValue(t0, cell);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003359 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
3360 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003361 __ Branch(&miss, ne, a2, Operand(t0));
3362 __ Branch(&miss, ne, a3, Operand(t0));
Ben Murdoch85b71792012-04-11 18:30:58 +01003363
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003364 if (Token::IsEqualityOp(op())) {
3365 __ Ret(USE_DELAY_SLOT);
3366 __ subu(v0, a0, a1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003367 } else {
3368 if (op() == Token::LT || op() == Token::LTE) {
3369 __ li(a2, Operand(Smi::FromInt(GREATER)));
3370 } else {
3371 __ li(a2, Operand(Smi::FromInt(LESS)));
3372 }
3373 __ Push(a1, a0, a2);
3374 __ TailCallRuntime(Runtime::kCompare);
3375 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003376
3377 __ bind(&miss);
3378 GenerateMiss(masm);
3379}
3380
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003381
3382void CompareICStub::GenerateMiss(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003383 {
3384 // Call the runtime system in a fresh internal frame.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003385 FrameScope scope(masm, StackFrame::INTERNAL);
3386 __ Push(a1, a0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003387 __ Push(ra, a1, a0);
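    // The first Push preserves the operands for the restore below. The second
    // Push saves ra and passes the two operands; together with the op smi
    // stored into the reserved slot (from the delay slot) they form the three
    // arguments to the CompareIC_Miss runtime entry.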
3388 __ li(t0, Operand(Smi::FromInt(op())));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003389 __ addiu(sp, sp, -kPointerSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003390 __ CallRuntime(Runtime::kCompareIC_Miss, 3, kDontSaveFPRegs,
3391 USE_DELAY_SLOT);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003392 __ sw(t0, MemOperand(sp)); // In the delay slot.
3393 // Compute the entry point of the rewritten stub.
3394 __ Addu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
3395 // Restore registers.
3396 __ Pop(a1, a0, ra);
3397 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003398 __ Jump(a2);
3399}
3400
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003401
Ben Murdoch257744e2011-11-30 15:57:28 +00003402void DirectCEntryStub::Generate(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003403 // Make room for the arguments required by the C calling convention. Most
3404 // callers of DirectCEntryStub::GenerateCall use EnterExitFrame/LeaveExitFrame,
3405 // which restore the stack, so we don't have to do that here.
3406 // Any caller of DirectCEntryStub::GenerateCall must take care of dropping
3407 // kCArgsSlotsSize stack space after the call.
3408 __ Subu(sp, sp, Operand(kCArgsSlotsSize));
3409 // Place the return address on the stack, making the call
3410 // GC safe. The RegExp backend also relies on this.
3411 __ sw(ra, MemOperand(sp, kCArgsSlotsSize));
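  // Resulting layout while the C++ call below runs (a sketch; on the o32 ABI
  // kCArgsSlotsSize is four words):
  //
  //   [sp + kCArgsSlotsSize]    saved ra, reloaded into t9 after the call
  //   [sp + 0 .. sp + 3 words]  argument slots required by the C callee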
3412 __ Call(t9); // Call the C++ function.
Ben Murdoch257744e2011-11-30 15:57:28 +00003413 __ lw(t9, MemOperand(sp, kCArgsSlotsSize));
3414
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003415 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003416 // In case of an error the return address may point to a memory area
3417 // filled with kZapValue by the GC.
3418 // Dereference the address and check for this.
3419 __ lw(t0, MemOperand(t9));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003420 __ Assert(ne, kReceivedInvalidReturnAddress, t0,
Ben Murdoch257744e2011-11-30 15:57:28 +00003421 Operand(reinterpret_cast<uint32_t>(kZapValue)));
3422 }
3423 __ Jump(t9);
Steve Block44f0eee2011-05-26 01:26:41 +01003424}
3425
3426
Ben Murdoch257744e2011-11-30 15:57:28 +00003427void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
Ben Murdoch257744e2011-11-30 15:57:28 +00003428 Register target) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003429 intptr_t loc =
3430 reinterpret_cast<intptr_t>(GetCode().location());
Ben Murdoch257744e2011-11-30 15:57:28 +00003431 __ Move(t9, target);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003432 __ li(at, Operand(loc, RelocInfo::CODE_TARGET), CONSTANT_SIZE);
3433 __ Call(at);
Ben Murdoch257744e2011-11-30 15:57:28 +00003434}
3435
3436
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003437void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
3438 Label* miss,
3439 Label* done,
3440 Register receiver,
3441 Register properties,
3442 Handle<Name> name,
3443 Register scratch0) {
3444 DCHECK(name->IsUniqueName());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003445 // If none of the slots probed for the hash value (probes 1 to kProbes - 1)
Ben Murdoch257744e2011-11-30 15:57:28 +00003446 // holds the name, and the kProbes-th slot is unused (its name is the
3447 // undefined value), the hash table is guaranteed not to contain the
3448 // property. This holds even if some slots represent deleted properties
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003449 // (their names are the hole value).
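  // The probe sequence is quadratic: probe i inspects index
  //   (hash + i + i*i) & mask
  // so with, say, hash == 7 and mask == 15 the first probes hit slots
  // 7, 9, 13, 3, ... (an illustrative example only).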
Ben Murdoch257744e2011-11-30 15:57:28 +00003450 for (int i = 0; i < kInlinedProbes; i++) {
3451 // scratch0 points to properties hash.
3452 // Compute the masked index: (hash + i + i * i) & mask.
3453 Register index = scratch0;
3454 // Capacity is smi 2^n.
3455 __ lw(index, FieldMemOperand(properties, kCapacityOffset));
3456 __ Subu(index, index, Operand(1));
3457 __ And(index, index, Operand(
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003458 Smi::FromInt(name->Hash() + NameDictionary::GetProbeOffset(i))));
Ben Murdoch257744e2011-11-30 15:57:28 +00003459
3460 // Scale the index by multiplying by the entry size.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003461 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003462 __ Lsa(index, index, index, 1);
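  // Lsa(rd, rs, rt, sa) is a shift-and-add that computes rs + (rt << sa), so
  // the line above yields index + (index << 1), i.e. index * 3 — one
  // dictionary entry is three fields (key, value, details).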
Ben Murdoch257744e2011-11-30 15:57:28 +00003463
3464 Register entity_name = scratch0;
3465 // Having undefined at this place means the name is not contained.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003466 STATIC_ASSERT(kSmiTagSize == 1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003467 Register tmp = properties;
Ben Murdoch097c5b22016-05-18 11:27:45 +01003468 __ Lsa(tmp, properties, index, 1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003469 __ lw(entity_name, FieldMemOperand(tmp, kElementsStartOffset));
3470
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003471 DCHECK(!tmp.is(entity_name));
Ben Murdoch257744e2011-11-30 15:57:28 +00003472 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
3473 __ Branch(done, eq, entity_name, Operand(tmp));
3474
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003475 // Load the hole ready for use below:
3476 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003477
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003478 // Stop if found the property.
3479 __ Branch(miss, eq, entity_name, Operand(Handle<Name>(name)));
Ben Murdoch257744e2011-11-30 15:57:28 +00003480
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003481 Label good;
3482 __ Branch(&good, eq, entity_name, Operand(tmp));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003483
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003484 // Check if the entry name is not a unique name.
3485 __ lw(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
3486 __ lbu(entity_name,
3487 FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
3488 __ JumpIfNotUniqueNameInstanceType(entity_name, miss);
3489 __ bind(&good);
Ben Murdoch257744e2011-11-30 15:57:28 +00003490
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003491 // Restore the properties.
3492 __ lw(properties,
3493 FieldMemOperand(receiver, JSObject::kPropertiesOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003494 }
3495
3496 const int spill_mask =
3497 (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() |
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003498 a2.bit() | a1.bit() | a0.bit() | v0.bit());
Ben Murdoch257744e2011-11-30 15:57:28 +00003499
3500 __ MultiPush(spill_mask);
3501 __ lw(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003502 __ li(a1, Operand(Handle<Name>(name)));
3503 NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003504 __ CallStub(&stub);
3505 __ mov(at, v0);
Ben Murdoch257744e2011-11-30 15:57:28 +00003506 __ MultiPop(spill_mask);
3507
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003508 __ Branch(done, eq, at, Operand(zero_reg));
3509 __ Branch(miss, ne, at, Operand(zero_reg));
Ben Murdoch257744e2011-11-30 15:57:28 +00003510}
3511
3512
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003513// Probe the name dictionary in the |elements| register. Jump to the
Ben Murdoch257744e2011-11-30 15:57:28 +00003514// |done| label if a property with the given name is found. Jump to
3515// the |miss| label otherwise.
3516// If lookup was successful |scratch2| will be equal to elements + 4 * index.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003517void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
3518 Label* miss,
3519 Label* done,
3520 Register elements,
3521 Register name,
3522 Register scratch1,
3523 Register scratch2) {
3524 DCHECK(!elements.is(scratch1));
3525 DCHECK(!elements.is(scratch2));
3526 DCHECK(!name.is(scratch1));
3527 DCHECK(!name.is(scratch2));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003528
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003529 __ AssertName(name);
Ben Murdoch257744e2011-11-30 15:57:28 +00003530
3531 // Compute the capacity mask.
3532 __ lw(scratch1, FieldMemOperand(elements, kCapacityOffset));
3533 __ sra(scratch1, scratch1, kSmiTagSize); // convert smi to int
3534 __ Subu(scratch1, scratch1, Operand(1));
3535
3536 // Generate an unrolled loop that performs a few probes before
3537 // giving up. Measurements done on Gmail indicate that 2 probes
3538 // cover ~93% of loads from dictionaries.
3539 for (int i = 0; i < kInlinedProbes; i++) {
3540 // Compute the masked index: (hash + i + i * i) & mask.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003541 __ lw(scratch2, FieldMemOperand(name, Name::kHashFieldOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003542 if (i > 0) {
3543 // Add the probe offset (i + i * i) left shifted to avoid right shifting
3544 // the hash in a separate instruction. The value hash + i + i * i is right
3545 // shifted in the following And instruction.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003546 DCHECK(NameDictionary::GetProbeOffset(i) <
3547 1 << (32 - Name::kHashFieldOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003548 __ Addu(scratch2, scratch2, Operand(
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003549 NameDictionary::GetProbeOffset(i) << Name::kHashShift));
Ben Murdoch257744e2011-11-30 15:57:28 +00003550 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003551 __ srl(scratch2, scratch2, Name::kHashShift);
Ben Murdoch257744e2011-11-30 15:57:28 +00003552 __ And(scratch2, scratch1, scratch2);
3553
3554 // Scale the index by multiplying by the element size.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003555 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
Ben Murdoch257744e2011-11-30 15:57:28 +00003556 // scratch2 = scratch2 * 3.
3557
Ben Murdoch097c5b22016-05-18 11:27:45 +01003558 __ Lsa(scratch2, scratch2, scratch2, 1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003559
3560 // Check if the key is identical to the name.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003561 __ Lsa(scratch2, elements, scratch2, 2);
Ben Murdoch257744e2011-11-30 15:57:28 +00003562 __ lw(at, FieldMemOperand(scratch2, kElementsStartOffset));
3563 __ Branch(done, eq, name, Operand(at));
3564 }
3565
3566 const int spill_mask =
3567 (ra.bit() | t2.bit() | t1.bit() | t0.bit() |
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003568 a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) &
Ben Murdoch257744e2011-11-30 15:57:28 +00003569 ~(scratch1.bit() | scratch2.bit());
3570
3571 __ MultiPush(spill_mask);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003572 if (name.is(a0)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003573 DCHECK(!elements.is(a1));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003574 __ Move(a1, name);
3575 __ Move(a0, elements);
3576 } else {
3577 __ Move(a0, elements);
3578 __ Move(a1, name);
3579 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003580 NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP);
Ben Murdoch257744e2011-11-30 15:57:28 +00003581 __ CallStub(&stub);
3582 __ mov(scratch2, a2);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003583 __ mov(at, v0);
Ben Murdoch257744e2011-11-30 15:57:28 +00003584 __ MultiPop(spill_mask);
3585
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003586 __ Branch(done, ne, at, Operand(zero_reg));
3587 __ Branch(miss, eq, at, Operand(zero_reg));
Ben Murdoch257744e2011-11-30 15:57:28 +00003588}
3589
3590
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003591void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003592 // This stub overrides SometimesSetsUpAFrame() to return false. That means
3593 // we cannot call anything that could cause a GC from this stub.
Ben Murdoch257744e2011-11-30 15:57:28 +00003594 // Registers:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003595 //  result: will hold the lookup result (v0).
Ben Murdoch257744e2011-11-30 15:57:28 +00003596 //  key: the name being looked up (a1).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003597 //  dictionary: NameDictionary to probe (a0).
3598 //  index: will hold the index of the entry if the lookup is successful (a2);
3599 //         might alias with result.
3600 // Returns:
3601 //  result is zero if the lookup failed, non-zero otherwise.
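  // A rough C-style sketch of the probing performed below (illustrative only;
  // probe_offset and entry_key_at are not real V8 helpers):
  //
  //   for (int i = kInlinedProbes; i < kTotalProbes; i++) {
  //     int index = (hash + probe_offset(i)) & mask;  // probe_offset(i) == i + i*i
  //     Object* candidate = entry_key_at(dictionary, index);
  //     if (candidate == undefined) return NOT_FOUND;  // result == 0
  //     if (candidate == key) return FOUND;            // result == 1
  //   }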
3602
3603 Register result = v0;
3604 Register dictionary = a0;
3605 Register key = a1;
3606 Register index = a2;
3607 Register mask = a3;
3608 Register hash = t0;
3609 Register undefined = t1;
3610 Register entry_key = t2;
3611
3612 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
3613
3614 __ lw(mask, FieldMemOperand(dictionary, kCapacityOffset));
3615 __ sra(mask, mask, kSmiTagSize);
3616 __ Subu(mask, mask, Operand(1));
3617
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003618 __ lw(hash, FieldMemOperand(key, Name::kHashFieldOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003619
3620 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
3621
3622 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
3623 // Compute the masked index: (hash + i + i * i) & mask.
3624 // Capacity is smi 2^n.
3625 if (i > 0) {
3626 // Add the probe offset (i + i * i) left shifted to avoid right shifting
3627 // the hash in a separate instruction. The value hash + i + i * i is right
3628 // shifted in the following And instruction.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003629 DCHECK(NameDictionary::GetProbeOffset(i) <
3630 1 << (32 - Name::kHashFieldOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003631 __ Addu(index, hash, Operand(
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003632 NameDictionary::GetProbeOffset(i) << Name::kHashShift));
Ben Murdoch257744e2011-11-30 15:57:28 +00003633 } else {
3634 __ mov(index, hash);
3635 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003636 __ srl(index, index, Name::kHashShift);
Ben Murdoch257744e2011-11-30 15:57:28 +00003637 __ And(index, mask, index);
3638
3639 // Scale the index by multiplying by the entry size.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003640 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
Ben Murdoch257744e2011-11-30 15:57:28 +00003641 // index *= 3.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003642 __ Lsa(index, index, index, 1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003643
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003644 STATIC_ASSERT(kSmiTagSize == 1);
Ben Murdoch097c5b22016-05-18 11:27:45 +01003645 __ Lsa(index, dictionary, index, 2);
Ben Murdoch257744e2011-11-30 15:57:28 +00003646 __ lw(entry_key, FieldMemOperand(index, kElementsStartOffset));
3647
3648 // Having undefined at this place means the name is not contained.
3649 __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined));
3650
3651 // Stop if found the property.
3652 __ Branch(&in_dictionary, eq, entry_key, Operand(key));
3653
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003654 if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
3655 // Check if the entry name is not a unique name.
Ben Murdoch257744e2011-11-30 15:57:28 +00003656 __ lw(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset));
3657 __ lbu(entry_key,
3658 FieldMemOperand(entry_key, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003659 __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary);
Ben Murdoch257744e2011-11-30 15:57:28 +00003660 }
3661 }
3662
3663 __ bind(&maybe_in_dictionary);
3664 // If we are doing negative lookup then probing failure should be
3665 // treated as a lookup success. For positive lookup probing failure
3666 // should be treated as lookup failure.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003667 if (mode() == POSITIVE_LOOKUP) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003668 __ Ret(USE_DELAY_SLOT);
Ben Murdoch257744e2011-11-30 15:57:28 +00003669 __ mov(result, zero_reg);
Ben Murdoch257744e2011-11-30 15:57:28 +00003670 }
3671
3672 __ bind(&in_dictionary);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003673 __ Ret(USE_DELAY_SLOT);
Ben Murdoch257744e2011-11-30 15:57:28 +00003674 __ li(result, 1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003675
3676 __ bind(&not_in_dictionary);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003677 __ Ret(USE_DELAY_SLOT);
Ben Murdoch257744e2011-11-30 15:57:28 +00003678 __ mov(result, zero_reg);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003679}
3680
3681
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003682void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
3683 Isolate* isolate) {
3684 StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
3685 stub1.GetCode();
3686 // Hydrogen code stubs need stub2 at snapshot time.
3687 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
3688 stub2.GetCode();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003689}
3690
3691
3692// Takes the input in 3 registers: address_ value_ and object_. A pointer to
3693// the value has just been written into the object, now this stub makes sure
3694// we keep the GC informed. The word in the object where the value has been
3695// written is in the address register.
3696void RecordWriteStub::Generate(MacroAssembler* masm) {
3697 Label skip_to_incremental_noncompacting;
3698 Label skip_to_incremental_compacting;
3699
3700 // The first two branch+nop instructions are generated with labels so as to
3701 // get the offset fixed up correctly by the bind(Label*) call. We patch it
3702 // back and forth between a "bne zero_reg, zero_reg, ..." (a nop in this
3703 // position) and the "beq zero_reg, zero_reg, ..." when we start and stop
3704 // incremental heap marking.
3705 // See RecordWriteStub::Patch for details.
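  // Sketch of the two patchable branch slots emitted below (illustration
  // only):
  //
  //   STORE_BUFFER_ONLY (initial):     incremental marking active:
  //     bne zero_reg, zero_reg, L        beq zero_reg, zero_reg, L
  //     nop                              nop
  //
  // i.e. each branch is toggled between "never taken" and "always taken"
  // by RecordWriteStub::Patch.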
3706 __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting);
3707 __ nop();
3708 __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting);
3709 __ nop();
3710
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003711 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
3712 __ RememberedSetHelper(object(),
3713 address(),
3714 value(),
3715 save_fp_regs_mode(),
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003716 MacroAssembler::kReturnAtEnd);
3717 }
Ben Murdoch592a9fc2012-03-05 11:04:45 +00003718 __ Ret();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003719
3720 __ bind(&skip_to_incremental_noncompacting);
3721 GenerateIncremental(masm, INCREMENTAL);
3722
3723 __ bind(&skip_to_incremental_compacting);
3724 GenerateIncremental(masm, INCREMENTAL_COMPACTION);
3725
3726 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
3727 // Will be checked in IncrementalMarking::ActivateGeneratedStub.
3728
3729 PatchBranchIntoNop(masm, 0);
3730 PatchBranchIntoNop(masm, 2 * Assembler::kInstrSize);
3731}
3732
3733
3734void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
3735 regs_.Save(masm);
3736
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003737 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003738 Label dont_need_remembered_set;
3739
3740 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0));
3741 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value.
3742 regs_.scratch0(),
3743 &dont_need_remembered_set);
3744
Ben Murdoch097c5b22016-05-18 11:27:45 +01003745 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
3746 &dont_need_remembered_set);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003747
3748 // First notify the incremental marker if necessary, then update the
3749 // remembered set.
3750 CheckNeedsToInformIncrementalMarker(
3751 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003752 InformIncrementalMarker(masm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003753 regs_.Restore(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003754 __ RememberedSetHelper(object(),
3755 address(),
3756 value(),
3757 save_fp_regs_mode(),
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003758 MacroAssembler::kReturnAtEnd);
3759
3760 __ bind(&dont_need_remembered_set);
3761 }
3762
3763 CheckNeedsToInformIncrementalMarker(
3764 masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003765 InformIncrementalMarker(masm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003766 regs_.Restore(masm);
3767 __ Ret();
3768}
3769
3770
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003771void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
3772 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003773 int argument_count = 3;
3774 __ PrepareCallCFunction(argument_count, regs_.scratch0());
3775 Register address =
3776 a0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003777 DCHECK(!address.is(regs_.object()));
3778 DCHECK(!address.is(a0));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003779 __ Move(address, regs_.address());
3780 __ Move(a0, regs_.object());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003781 __ Move(a1, address);
3782 __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003783
3784 AllowExternalCallThatCantCauseGC scope(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003785 __ CallCFunction(
3786 ExternalReference::incremental_marking_record_write_function(isolate()),
3787 argument_count);
3788 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003789}
3790
3791
3792void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
3793 MacroAssembler* masm,
3794 OnNoNeedToInformIncrementalMarker on_no_need,
3795 Mode mode) {
3796 Label on_black;
3797 Label need_incremental;
3798 Label need_incremental_pop_scratch;
3799
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003800 __ And(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask));
3801 __ lw(regs_.scratch1(),
3802 MemOperand(regs_.scratch0(),
3803 MemoryChunk::kWriteBarrierCounterOffset));
3804 __ Subu(regs_.scratch1(), regs_.scratch1(), Operand(1));
3805 __ sw(regs_.scratch1(),
3806 MemOperand(regs_.scratch0(),
3807 MemoryChunk::kWriteBarrierCounterOffset));
3808 __ Branch(&need_incremental, lt, regs_.scratch1(), Operand(zero_reg));
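  // The page's write barrier counter, decremented above, throttles this fast
  // path: once it drops below zero the stub falls through to &need_incremental
  // and the caller informs the incremental marker unconditionally.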
3809
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003810 // Let's look at the color of the object: If it is not black we don't have
3811 // to inform the incremental marker.
3812 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
3813
3814 regs_.Restore(masm);
3815 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003816 __ RememberedSetHelper(object(),
3817 address(),
3818 value(),
3819 save_fp_regs_mode(),
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003820 MacroAssembler::kReturnAtEnd);
3821 } else {
3822 __ Ret();
3823 }
3824
3825 __ bind(&on_black);
3826
3827 // Get the value from the slot.
3828 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0));
3829
3830 if (mode == INCREMENTAL_COMPACTION) {
3831 Label ensure_not_white;
3832
3833 __ CheckPageFlag(regs_.scratch0(), // Contains value.
3834 regs_.scratch1(), // Scratch.
3835 MemoryChunk::kEvacuationCandidateMask,
3836 eq,
3837 &ensure_not_white);
3838
3839 __ CheckPageFlag(regs_.object(),
3840 regs_.scratch1(), // Scratch.
3841 MemoryChunk::kSkipEvacuationSlotsRecordingMask,
3842 eq,
3843 &need_incremental);
3844
3845 __ bind(&ensure_not_white);
3846 }
3847
3848 // We need extra registers for this, so we push the object and the address
3849 // register temporarily.
3850 __ Push(regs_.object(), regs_.address());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003851 __ JumpIfWhite(regs_.scratch0(), // The value.
3852 regs_.scratch1(), // Scratch.
3853 regs_.object(), // Scratch.
3854 regs_.address(), // Scratch.
3855 &need_incremental_pop_scratch);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003856 __ Pop(regs_.object(), regs_.address());
3857
3858 regs_.Restore(masm);
3859 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003860 __ RememberedSetHelper(object(),
3861 address(),
3862 value(),
3863 save_fp_regs_mode(),
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003864 MacroAssembler::kReturnAtEnd);
3865 } else {
3866 __ Ret();
3867 }
3868
3869 __ bind(&need_incremental_pop_scratch);
3870 __ Pop(regs_.object(), regs_.address());
3871
3872 __ bind(&need_incremental);
3873
3874 // Fall through when we need to inform the incremental marker.
3875}
3876
3877
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003878void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
3879 CEntryStub ces(isolate(), 1, kSaveFPRegs);
3880 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
3881 int parameter_count_offset =
Ben Murdochda12d292016-06-02 14:46:10 +01003882 StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003883 __ lw(a1, MemOperand(fp, parameter_count_offset));
3884 if (function_mode() == JS_FUNCTION_STUB_MODE) {
3885 __ Addu(a1, a1, Operand(1));
3886 }
3887 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
3888 __ sll(a1, a1, kPointerSizeLog2);
3889 __ Ret(USE_DELAY_SLOT);
3890 __ Addu(sp, sp, a1);
3891}
3892
3893
3894void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003895 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
3896 LoadICStub stub(isolate(), state());
3897 stub.GenerateForTrampoline(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003898}
3899
3900
3901void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003902 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
3903 KeyedLoadICStub stub(isolate(), state());
3904 stub.GenerateForTrampoline(masm);
3905}
3906
3907
3908void CallICTrampolineStub::Generate(MacroAssembler* masm) {
3909 __ EmitLoadTypeFeedbackVector(a2);
3910 CallICStub stub(isolate(), state());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003911 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
3912}
3913
3914
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003915void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
3916
3917
3918void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
3919 GenerateImpl(masm, true);
3920}
3921
3922
3923static void HandleArrayCases(MacroAssembler* masm, Register feedback,
3924 Register receiver_map, Register scratch1,
3925 Register scratch2, bool is_polymorphic,
3926 Label* miss) {
3927 // feedback initially contains the feedback array
3928 Label next_loop, prepare_next;
3929 Label start_polymorphic;
3930
3931 Register cached_map = scratch1;
3932
3933 __ lw(cached_map,
3934 FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(0)));
3935 __ lw(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
3936 __ Branch(&start_polymorphic, ne, receiver_map, Operand(cached_map));
3937 // found, now call handler.
3938 Register handler = feedback;
3939 __ lw(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1)));
3940 __ Addu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
3941 __ Jump(t9);
3942
3943
3944 Register length = scratch2;
3945 __ bind(&start_polymorphic);
3946 __ lw(length, FieldMemOperand(feedback, FixedArray::kLengthOffset));
3947 if (!is_polymorphic) {
3948 // If the IC could be monomorphic we have to make sure we don't go past the
3949 // end of the feedback array.
3950 __ Branch(miss, eq, length, Operand(Smi::FromInt(2)));
3951 }
3952
3953 Register too_far = length;
3954 Register pointer_reg = feedback;
3955
3956 // +-----+------+------+-----+-----+ ... ----+
3957 // | map | len  | wm0  | h0  | wm1 |      hN |
3958 // +-----+------+------+-----+-----+ ... ----+
3959 //                 0      1            len-1
3960 //                 ^                      ^
3961 //                 |                      |
3962 //            pointer_reg              too_far
3963 //            aka feedback             scratch2
3964 // also need receiver_map
3965 // use cached_map (scratch1) to look in the weak map values.
Ben Murdoch097c5b22016-05-18 11:27:45 +01003966 __ Lsa(too_far, feedback, length, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003967 __ Addu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3968 __ Addu(pointer_reg, feedback,
3969 Operand(FixedArray::OffsetOfElementAt(2) - kHeapObjectTag));
3970
3971 __ bind(&next_loop);
3972 __ lw(cached_map, MemOperand(pointer_reg));
3973 __ lw(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
3974 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map));
3975 __ lw(handler, MemOperand(pointer_reg, kPointerSize));
3976 __ Addu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
3977 __ Jump(t9);
3978
3979 __ bind(&prepare_next);
3980 __ Addu(pointer_reg, pointer_reg, Operand(kPointerSize * 2));
3981 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far));
3982
3983 // We exhausted our array of map handler pairs.
3984 __ jmp(miss);
3985}
3986
3987
3988static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
3989 Register receiver_map, Register feedback,
3990 Register vector, Register slot,
3991 Register scratch, Label* compare_map,
3992 Label* load_smi_map, Label* try_array) {
3993 __ JumpIfSmi(receiver, load_smi_map);
3994 __ lw(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
3995 __ bind(compare_map);
3996 Register cached_map = scratch;
3997 // Move the weak map into the weak_cell register.
3998 __ lw(cached_map, FieldMemOperand(feedback, WeakCell::kValueOffset));
3999 __ Branch(try_array, ne, cached_map, Operand(receiver_map));
4000 Register handler = feedback;
4001
Ben Murdoch097c5b22016-05-18 11:27:45 +01004002 __ Lsa(handler, vector, slot, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004003 __ lw(handler,
4004 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize));
4005 __ Addu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
4006 __ Jump(t9);
4007}
4008
4009
4010void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4011 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // a1
4012 Register name = LoadWithVectorDescriptor::NameRegister(); // a2
4013 Register vector = LoadWithVectorDescriptor::VectorRegister(); // a3
4014 Register slot = LoadWithVectorDescriptor::SlotRegister(); // a0
4015 Register feedback = t0;
4016 Register receiver_map = t1;
4017 Register scratch1 = t4;
4018
Ben Murdoch097c5b22016-05-18 11:27:45 +01004019 __ Lsa(feedback, vector, slot, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004020 __ lw(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
4021
4022 // Try to quickly handle the monomorphic case without knowing for sure
4023 // if we have a weak cell in feedback. We do know it's safe to look
4024 // at WeakCell::kValueOffset.
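  // What the feedback slot can hold at this point (as handled below):
  //   - a WeakCell holding a map        -> monomorphic; handler in slot + 1
  //   - a FixedArray of (map, handler)  -> polymorphic; scanned by
  //                                        HandleArrayCases
  //   - the megamorphic symbol          -> probe the stub cache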
4025 Label try_array, load_smi_map, compare_map;
4026 Label not_array, miss;
4027 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
4028 scratch1, &compare_map, &load_smi_map, &try_array);
4029
4030 // Is it a fixed array?
4031 __ bind(&try_array);
4032 __ lw(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
4033 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
4034 __ Branch(&not_array, ne, at, Operand(scratch1));
4035 HandleArrayCases(masm, feedback, receiver_map, scratch1, t5, true, &miss);
4036
4037 __ bind(&not_array);
4038 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
4039 __ Branch(&miss, ne, at, Operand(feedback));
4040 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
4041 Code::ComputeHandlerFlags(Code::LOAD_IC));
4042 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags,
4043 receiver, name, feedback,
4044 receiver_map, scratch1, t5);
4045
4046 __ bind(&miss);
4047 LoadIC::GenerateMiss(masm);
4048
4049 __ bind(&load_smi_map);
4050 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
4051 __ jmp(&compare_map);
4052}
4053
4054
4055void KeyedLoadICStub::Generate(MacroAssembler* masm) {
4056 GenerateImpl(masm, false);
4057}
4058
4059
4060void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
4061 GenerateImpl(masm, true);
4062}
4063
4064
4065void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4066 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // a1
4067 Register key = LoadWithVectorDescriptor::NameRegister(); // a2
4068 Register vector = LoadWithVectorDescriptor::VectorRegister(); // a3
4069 Register slot = LoadWithVectorDescriptor::SlotRegister(); // a0
4070 Register feedback = t0;
4071 Register receiver_map = t1;
4072 Register scratch1 = t4;
4073
Ben Murdoch097c5b22016-05-18 11:27:45 +01004074 __ Lsa(feedback, vector, slot, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004075 __ lw(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
4076
4077 // Try to quickly handle the monomorphic case without knowing for sure
4078 // if we have a weak cell in feedback. We do know it's safe to look
4079 // at WeakCell::kValueOffset.
4080 Label try_array, load_smi_map, compare_map;
4081 Label not_array, miss;
4082 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
4083 scratch1, &compare_map, &load_smi_map, &try_array);
4084
4085 __ bind(&try_array);
4086 // Is it a fixed array?
4087 __ lw(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
4088 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
4089 __ Branch(&not_array, ne, at, Operand(scratch1));
4090 // We have a polymorphic element handler.
4091 __ JumpIfNotSmi(key, &miss);
4092
4093 Label polymorphic, try_poly_name;
4094 __ bind(&polymorphic);
4095 HandleArrayCases(masm, feedback, receiver_map, scratch1, t5, true, &miss);
4096
4097 __ bind(&not_array);
4098 // Is it generic?
4099 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
4100 __ Branch(&try_poly_name, ne, at, Operand(feedback));
4101 Handle<Code> megamorphic_stub =
4102 KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
4103 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET);
4104
4105 __ bind(&try_poly_name);
4106 // We might have a name in feedback, and a fixed array in the next slot.
4107 __ Branch(&miss, ne, key, Operand(feedback));
4108 // If the name comparison succeeded, we know we have a fixed array with
4109 // at least one map/handler pair.
Ben Murdoch097c5b22016-05-18 11:27:45 +01004110 __ Lsa(feedback, vector, slot, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004111 __ lw(feedback,
4112 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize));
4113 HandleArrayCases(masm, feedback, receiver_map, scratch1, t5, false, &miss);
4114
4115 __ bind(&miss);
4116 KeyedLoadIC::GenerateMiss(masm);
4117
4118 __ bind(&load_smi_map);
4119 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
4120 __ jmp(&compare_map);
4121}
4122
4123
4124void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
4125 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
4126 VectorStoreICStub stub(isolate(), state());
4127 stub.GenerateForTrampoline(masm);
4128}
4129
4130
4131void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
4132 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
4133 VectorKeyedStoreICStub stub(isolate(), state());
4134 stub.GenerateForTrampoline(masm);
4135}
4136
4137
4138void VectorStoreICStub::Generate(MacroAssembler* masm) {
4139 GenerateImpl(masm, false);
4140}
4141
4142
4143void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
4144 GenerateImpl(masm, true);
4145}
4146
4147
4148void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4149 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // a1
4150 Register key = VectorStoreICDescriptor::NameRegister(); // a2
4151 Register vector = VectorStoreICDescriptor::VectorRegister(); // a3
4152 Register slot = VectorStoreICDescriptor::SlotRegister(); // t0
4153 DCHECK(VectorStoreICDescriptor::ValueRegister().is(a0)); // a0
4154 Register feedback = t1;
4155 Register receiver_map = t2;
4156 Register scratch1 = t5;
4157
Ben Murdoch097c5b22016-05-18 11:27:45 +01004158 __ Lsa(feedback, vector, slot, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004159 __ lw(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
4160
4161 // Try to quickly handle the monomorphic case without knowing for sure
4162 // if we have a weak cell in feedback. We do know it's safe to look
4163 // at WeakCell::kValueOffset.
4164 Label try_array, load_smi_map, compare_map;
4165 Label not_array, miss;
4166 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
4167 scratch1, &compare_map, &load_smi_map, &try_array);
4168
4169 // Is it a fixed array?
4170 __ bind(&try_array);
4171 __ lw(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
4172 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
4173 __ Branch(&not_array, ne, scratch1, Operand(at));
4174
4175 Register scratch2 = t4;
4176 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, true,
4177 &miss);
4178
4179 __ bind(&not_array);
4180 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
4181 __ Branch(&miss, ne, feedback, Operand(at));
4182 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
4183 Code::ComputeHandlerFlags(Code::STORE_IC));
4184 masm->isolate()->stub_cache()->GenerateProbe(
4185 masm, Code::STORE_IC, code_flags, receiver, key, feedback, receiver_map,
4186 scratch1, scratch2);
4187
4188 __ bind(&miss);
4189 StoreIC::GenerateMiss(masm);
4190
4191 __ bind(&load_smi_map);
4192 __ Branch(USE_DELAY_SLOT, &compare_map);
4193 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); // In delay slot.
4194}
4195
4196
4197void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
4198 GenerateImpl(masm, false);
4199}
4200
4201
4202void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
4203 GenerateImpl(masm, true);
4204}
4205
4206
4207static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback,
4208 Register receiver_map, Register scratch1,
4209 Register scratch2, Label* miss) {
4210 // feedback initially contains the feedback array
4211 Label next_loop, prepare_next;
4212 Label start_polymorphic;
4213 Label transition_call;
4214
4215 Register cached_map = scratch1;
4216 Register too_far = scratch2;
4217 Register pointer_reg = feedback;
4218 __ lw(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset));
4219
4220 // +-----+------+------+-----+-----+-----+ ... ----+
4221 // | map | len  | wm0  | wt0 | h0  | wm1 |      hN |
4222 // +-----+------+------+-----+-----+-----+ ... ----+
4223 //                 0      1      2             len-1
4224 //                 ^                             ^
4225 //                 |                             |
4226 //            pointer_reg                     too_far
4227 //            aka feedback                    scratch2
4228 // also need receiver_map
4229 // use cached_map (scratch1) to look in the weak map values.
Ben Murdoch097c5b22016-05-18 11:27:45 +01004230 __ Lsa(too_far, feedback, too_far, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004231 __ Addu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4232 __ Addu(pointer_reg, feedback,
4233 Operand(FixedArray::OffsetOfElementAt(0) - kHeapObjectTag));
4234
4235 __ bind(&next_loop);
4236 __ lw(cached_map, MemOperand(pointer_reg));
4237 __ lw(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
4238 __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map));
4239 // Is it a transitioning store?
4240 __ lw(too_far, MemOperand(pointer_reg, kPointerSize));
4241 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4242 __ Branch(&transition_call, ne, too_far, Operand(at));
4243 __ lw(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2));
4244 __ Addu(t9, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag));
4245 __ Jump(t9);
4246
4247 __ bind(&transition_call);
4248 __ lw(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset));
4249 __ JumpIfSmi(too_far, miss);
4250
4251 __ lw(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));
4252
4253 // Load the map into the correct register.
4254 DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister()));
4255 __ mov(feedback, too_far);
4256
4257 __ Addu(t9, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag));
4258 __ Jump(t9);
4259
4260 __ bind(&prepare_next);
4261 __ Addu(pointer_reg, pointer_reg, Operand(kPointerSize * 3));
4262 __ Branch(&next_loop, lt, pointer_reg, Operand(too_far));
4263
4264 // We exhausted our array of map handler pairs.
4265 __ jmp(miss);
4266}
4267
4268
4269void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4270 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // a1
4271 Register key = VectorStoreICDescriptor::NameRegister(); // a2
4272 Register vector = VectorStoreICDescriptor::VectorRegister(); // a3
4273 Register slot = VectorStoreICDescriptor::SlotRegister(); // t0
4274 DCHECK(VectorStoreICDescriptor::ValueRegister().is(a0)); // a0
4275 Register feedback = t1;
4276 Register receiver_map = t2;
4277 Register scratch1 = t5;
4278
Ben Murdoch097c5b22016-05-18 11:27:45 +01004279 __ Lsa(feedback, vector, slot, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004280 __ lw(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
4281
4282 // Try to quickly handle the monomorphic case without knowing for sure
4283 // if we have a weak cell in feedback. We do know it's safe to look
4284 // at WeakCell::kValueOffset.
4285 Label try_array, load_smi_map, compare_map;
4286 Label not_array, miss;
4287 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
4288 scratch1, &compare_map, &load_smi_map, &try_array);
4289
4290 __ bind(&try_array);
4291 // Is it a fixed array?
4292 __ lw(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
4293 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
4294 __ Branch(&not_array, ne, scratch1, Operand(at));
4295
4296 // We have a polymorphic element handler.
4297 Label polymorphic, try_poly_name;
4298 __ bind(&polymorphic);
4299
4300 Register scratch2 = t4;
4301
4302 HandlePolymorphicStoreCase(masm, feedback, receiver_map, scratch1, scratch2,
4303 &miss);
4304
4305 __ bind(&not_array);
4306 // Is it generic?
4307 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
4308 __ Branch(&try_poly_name, ne, feedback, Operand(at));
4309 Handle<Code> megamorphic_stub =
4310 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
4311 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET);
4312
4313 __ bind(&try_poly_name);
4314 // We might have a name in feedback, and a fixed array in the next slot.
4315 __ Branch(&miss, ne, key, Operand(feedback));
4316 // If the name comparison succeeded, we know we have a fixed array with
4317 // at least one map/handler pair.
Ben Murdoch097c5b22016-05-18 11:27:45 +01004318 __ Lsa(feedback, vector, slot, kPointerSizeLog2 - kSmiTagSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004319 __ lw(feedback,
4320 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize));
4321 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, false,
4322 &miss);
4323
4324 __ bind(&miss);
4325 KeyedStoreIC::GenerateMiss(masm);
4326
4327 __ bind(&load_smi_map);
4328 __ Branch(USE_DELAY_SLOT, &compare_map);
4329 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); // In delay slot.
4330}
4331
4332
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004333void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
4334 if (masm->isolate()->function_entry_hook() != NULL) {
4335 ProfileEntryHookStub stub(masm->isolate());
4336 __ push(ra);
4337 __ CallStub(&stub);
4338 __ pop(ra);
4339 }
4340}
4341
4342
4343void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
4344 // The entry hook is a "push ra" instruction, followed by a call.
4345 // Note: on MIPS a "push" is 2 instructions.
4346 const int32_t kReturnAddressDistanceFromFunctionStart =
4347 Assembler::kCallTargetAddressOffset + (2 * Assembler::kInstrSize);
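  // Rough arithmetic behind the constant above (a sketch): the "push ra"
  // prologue is two instructions (8 bytes) and the call sequence adds
  // kCallTargetAddressOffset, so ra points that many bytes past the start of
  // the instrumented function when the hook runs.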
4348
4349 // This should contain all kJSCallerSaved registers.
4350 const RegList kSavedRegs =
4351 kJSCallerSaved | // Caller saved registers.
4352 s5.bit(); // Saved stack pointer.
4353
4354 // We also save ra, so the count here is one higher than the mask indicates.
4355 const int32_t kNumSavedRegs = kNumJSCallerSaved + 2;
4356
4357 // Save all caller-save registers as this may be called from anywhere.
4358 __ MultiPush(kSavedRegs | ra.bit());
4359
4360 // Compute the function's address for the first argument.
4361 __ Subu(a0, ra, Operand(kReturnAddressDistanceFromFunctionStart));
4362
4363 // The caller's return address is above the saved temporaries.
4364 // Grab that for the second argument to the hook.
4365 __ Addu(a1, sp, Operand(kNumSavedRegs * kPointerSize));
4366
4367 // Align the stack if necessary.
4368 int frame_alignment = masm->ActivationFrameAlignment();
4369 if (frame_alignment > kPointerSize) {
4370 __ mov(s5, sp);
4371 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
4372 __ And(sp, sp, Operand(-frame_alignment));
4373 }
4374 __ Subu(sp, sp, kCArgsSlotsSize);
4375#if defined(V8_HOST_ARCH_MIPS)
4376 int32_t entry_hook =
4377 reinterpret_cast<int32_t>(isolate()->function_entry_hook());
4378 __ li(t9, Operand(entry_hook));
4379#else
4380 // Under the simulator we need to indirect the entry hook through a
4381 // trampoline function at a known address.
4382 // It additionally takes an isolate as a third parameter.
4383 __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
4384
4385 ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
4386 __ li(t9, Operand(ExternalReference(&dispatcher,
4387 ExternalReference::BUILTIN_CALL,
4388 isolate())));
4389#endif
4390 // Call C function through t9 to conform ABI for PIC.
4391 __ Call(t9);
4392
4393 // Restore the stack pointer if needed.
4394 if (frame_alignment > kPointerSize) {
4395 __ mov(sp, s5);
4396 } else {
4397 __ Addu(sp, sp, kCArgsSlotsSize);
4398 }
4399
4400 // Also pop ra to get Ret(0).
4401 __ MultiPop(kSavedRegs | ra.bit());
4402 __ Ret();
4403}
4404
4405
4406template<class T>
4407static void CreateArrayDispatch(MacroAssembler* masm,
4408 AllocationSiteOverrideMode mode) {
4409 if (mode == DISABLE_ALLOCATION_SITES) {
4410 T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
4411 __ TailCallStub(&stub);
4412 } else if (mode == DONT_OVERRIDE) {
4413 int last_index = GetSequenceIndexFromFastElementsKind(
4414 TERMINAL_FAST_ELEMENTS_KIND);
4415 for (int i = 0; i <= last_index; ++i) {
4416 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4417 T stub(masm->isolate(), kind);
4418 __ TailCallStub(&stub, eq, a3, Operand(kind));
4419 }
4420
4421 // If we reached this point there is a problem.
4422 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4423 } else {
4424 UNREACHABLE();
4425 }
4426}
4427
4428
4429static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
4430 AllocationSiteOverrideMode mode) {
4431 // a2 - allocation site (if mode != DISABLE_ALLOCATION_SITES)
4432 // a3 - kind (if mode != DISABLE_ALLOCATION_SITES)
4433 // a0 - number of arguments
4434 // a1 - constructor?
4435 // sp[0] - last argument
4436 Label normal_sequence;
4437 if (mode == DONT_OVERRIDE) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004438 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
4439 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
4440 STATIC_ASSERT(FAST_ELEMENTS == 2);
4441 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
4442 STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
4443 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004444
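  // With the ordering asserted above, the holey variant of a packed kind is
  // always packed_kind | 1, e.g. FAST_ELEMENTS (2) -> FAST_HOLEY_ELEMENTS (3).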
4445 // Is the low bit set? If so, the kind is already holey and we can proceed.
4446 __ And(at, a3, Operand(1));
4447 __ Branch(&normal_sequence, ne, at, Operand(zero_reg));
4448 }
4449
4450 // Look at the first argument.
4451 __ lw(t1, MemOperand(sp, 0));
4452 __ Branch(&normal_sequence, eq, t1, Operand(zero_reg));
4453
4454 if (mode == DISABLE_ALLOCATION_SITES) {
4455 ElementsKind initial = GetInitialFastElementsKind();
4456 ElementsKind holey_initial = GetHoleyElementsKind(initial);
4457
4458 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
4459 holey_initial,
4460 DISABLE_ALLOCATION_SITES);
4461 __ TailCallStub(&stub_holey);
4462
4463 __ bind(&normal_sequence);
4464 ArraySingleArgumentConstructorStub stub(masm->isolate(),
4465 initial,
4466 DISABLE_ALLOCATION_SITES);
4467 __ TailCallStub(&stub);
4468 } else if (mode == DONT_OVERRIDE) {
4469 // We are going to create a holey array, but our kind is non-holey.
4470 // Fix kind and retry (only if we have an allocation site in the slot).
4471 __ Addu(a3, a3, Operand(1));
4472
4473 if (FLAG_debug_code) {
4474 __ lw(t1, FieldMemOperand(a2, 0));
4475 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
4476 __ Assert(eq, kExpectedAllocationSite, t1, Operand(at));
4477 }
4478
4479 // Save the resulting elements kind in type info. We can't just store a3
4480 // in the AllocationSite::transition_info field because elements kind is
4481 // restricted to a portion of the field; the upper bits need to be left alone.
4482 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4483 __ lw(t0, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset));
4484 __ Addu(t0, t0, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
4485 __ sw(t0, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset));
4486
4487
4488 __ bind(&normal_sequence);
4489 int last_index = GetSequenceIndexFromFastElementsKind(
4490 TERMINAL_FAST_ELEMENTS_KIND);
4491 for (int i = 0; i <= last_index; ++i) {
4492 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4493 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
4494 __ TailCallStub(&stub, eq, a3, Operand(kind));
4495 }
4496
4497 // If we reached this point there is a problem.
4498 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4499 } else {
4500 UNREACHABLE();
4501 }
4502}
4503
4504
4505template<class T>
4506static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4507 int to_index = GetSequenceIndexFromFastElementsKind(
4508 TERMINAL_FAST_ELEMENTS_KIND);
4509 for (int i = 0; i <= to_index; ++i) {
4510 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4511 T stub(isolate, kind);
4512 stub.GetCode();
4513 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
4514 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
4515 stub1.GetCode();
4516 }
4517 }
4518}
4519
4520
4521void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
4522 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
4523 isolate);
4524 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
4525 isolate);
4526 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
4527 isolate);
4528}
4529
4530
4531void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
4532 Isolate* isolate) {
4533 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
4534 for (int i = 0; i < 2; i++) {
4535 // For internal arrays we only need a few things.
4536 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
4537 stubh1.GetCode();
4538 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
4539 stubh2.GetCode();
4540 InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
4541 stubh3.GetCode();
4542 }
4543}
4544
4545
4546void ArrayConstructorStub::GenerateDispatchToArrayStub(
4547 MacroAssembler* masm,
4548 AllocationSiteOverrideMode mode) {
4549 if (argument_count() == ANY) {
4550 Label not_zero_case, not_one_case;
4551 __ And(at, a0, a0);
4552 __ Branch(&not_zero_case, ne, at, Operand(zero_reg));
4553 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4554
4555 __ bind(&not_zero_case);
4556 __ Branch(&not_one_case, gt, a0, Operand(1));
4557 CreateArrayDispatchOneArgument(masm, mode);
4558
4559 __ bind(&not_one_case);
4560 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4561 } else if (argument_count() == NONE) {
4562 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4563 } else if (argument_count() == ONE) {
4564 CreateArrayDispatchOneArgument(masm, mode);
4565 } else if (argument_count() == MORE_THAN_ONE) {
4566 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4567 } else {
4568 UNREACHABLE();
4569 }
4570}
4571
4572
4573void ArrayConstructorStub::Generate(MacroAssembler* masm) {
4574 // ----------- S t a t e -------------
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004575 // -- a0 : argc (only if argument_count() is ANY or MORE_THAN_ONE)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004576 // -- a1 : constructor
4577 // -- a2 : AllocationSite or undefined
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004578 // -- a3 : Original constructor
4579 // -- sp[0] : last argument
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004580 // -----------------------------------
4581
4582 if (FLAG_debug_code) {
4583 // The array construct code is only set for the global and natives
4584 // builtin Array functions which always have maps.
4585
4586 // Initial map for the builtin Array function should be a map.
4587 __ lw(t0, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
4588 // Will both indicate a NULL and a Smi.
4589 __ SmiTst(t0, at);
4590 __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
4591 at, Operand(zero_reg));
4592 __ GetObjectType(t0, t0, t1);
4593 __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
4594 t1, Operand(MAP_TYPE));
4595
4596 // We should either have undefined in a2 or a valid AllocationSite
4597 __ AssertUndefinedOrAllocationSite(a2, t0);
4598 }
4599
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004600 // Enter the context of the Array function.
4601 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
4602
4603 Label subclassing;
4604 __ Branch(&subclassing, ne, a1, Operand(a3));
4605
4606  Label no_info;
4607 // Get the elements kind and case on that.
4608 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4609 __ Branch(&no_info, eq, a2, Operand(at));
4610
4611 __ lw(a3, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset));
4612 __ SmiUntag(a3);
4613 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4614 __ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask));
4615 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
4616
4617 __ bind(&no_info);
4618 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
4619
4620 // Subclassing.
4621 __ bind(&subclassing);
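  // Tail call Runtime::kNewArray: the target is stored over the receiver slot
  // and argc is bumped by 3 for the extra arguments (target, new target and
  // allocation site) passed to the runtime.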
4622 switch (argument_count()) {
4623 case ANY:
4624 case MORE_THAN_ONE:
4625      __ Lsa(at, sp, a0, kPointerSizeLog2);
4626      __ sw(a1, MemOperand(at));
4627 __ li(at, Operand(3));
4628 __ addu(a0, a0, at);
4629 break;
4630 case NONE:
4631 __ sw(a1, MemOperand(sp, 0 * kPointerSize));
4632 __ li(a0, Operand(3));
4633 break;
4634 case ONE:
4635 __ sw(a1, MemOperand(sp, 1 * kPointerSize));
4636 __ li(a0, Operand(4));
4637 break;
4638 }
4639 __ Push(a3, a2);
4640 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
4641}
4642
4643
4644void InternalArrayConstructorStub::GenerateCase(
4645 MacroAssembler* masm, ElementsKind kind) {
4646
4647 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
4648 __ TailCallStub(&stub0, lo, a0, Operand(1));
4649
4650 InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
4651 __ TailCallStub(&stubN, hi, a0, Operand(1));
4652
4653 if (IsFastPackedElementsKind(kind)) {
4654    // We might need to create a holey array;
4655    // look at the first argument.
4656 __ lw(at, MemOperand(sp, 0));
4657
4658 InternalArraySingleArgumentConstructorStub
4659 stub1_holey(isolate(), GetHoleyElementsKind(kind));
4660 __ TailCallStub(&stub1_holey, ne, at, Operand(zero_reg));
4661 }
4662
4663 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
4664 __ TailCallStub(&stub1);
4665}
4666
4667
4668void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
4669 // ----------- S t a t e -------------
4670 // -- a0 : argc
4671 // -- a1 : constructor
4672 // -- sp[0] : return address
4673 // -- sp[4] : last argument
4674 // -----------------------------------
4675
4676 if (FLAG_debug_code) {
4677 // The array construct code is only set for the global and natives
4678 // builtin Array functions which always have maps.
4679
4680 // Initial map for the builtin Array function should be a map.
4681 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
4682 // Will both indicate a NULL and a Smi.
4683    // The Smi test below catches both a NULL pointer and a Smi.
4684 __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
4685 at, Operand(zero_reg));
4686 __ GetObjectType(a3, a3, t0);
4687 __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
4688 t0, Operand(MAP_TYPE));
4689 }
4690
4691 // Figure out the right elements kind.
4692 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
4693
4694 // Load the map's "bit field 2" into a3. We only need the first byte,
4695 // but the following bit field extraction takes care of that anyway.
4696 __ lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset));
4697 // Retrieve elements_kind from bit field 2.
4698 __ DecodeField<Map::ElementsKindBits>(a3);
4699
4700 if (FLAG_debug_code) {
4701 Label done;
4702 __ Branch(&done, eq, a3, Operand(FAST_ELEMENTS));
4703 __ Assert(
4704 eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray,
4705 a3, Operand(FAST_HOLEY_ELEMENTS));
4706 __ bind(&done);
4707 }
4708
4709 Label fast_elements_case;
4710 __ Branch(&fast_elements_case, eq, a3, Operand(FAST_ELEMENTS));
4711 GenerateCase(masm, FAST_HOLEY_ELEMENTS);
4712
4713 __ bind(&fast_elements_case);
4714 GenerateCase(masm, FAST_ELEMENTS);
4715}
4716
4717
4718void FastNewObjectStub::Generate(MacroAssembler* masm) {
4719 // ----------- S t a t e -------------
4720 // -- a1 : target
4721 // -- a3 : new target
4722 // -- cp : context
4723 // -- ra : return address
4724 // -----------------------------------
4725 __ AssertFunction(a1);
4726 __ AssertReceiver(a3);
4727
4728 // Verify that the new target is a JSFunction.
4729 Label new_object;
4730 __ GetObjectType(a3, a2, a2);
4731 __ Branch(&new_object, ne, a2, Operand(JS_FUNCTION_TYPE));
4732
4733 // Load the initial map and verify that it's in fact a map.
4734 __ lw(a2, FieldMemOperand(a3, JSFunction::kPrototypeOrInitialMapOffset));
4735 __ JumpIfSmi(a2, &new_object);
4736 __ GetObjectType(a2, a0, a0);
4737 __ Branch(&new_object, ne, a0, Operand(MAP_TYPE));
4738
4739 // Fall back to runtime if the target differs from the new target's
4740 // initial map constructor.
4741 __ lw(a0, FieldMemOperand(a2, Map::kConstructorOrBackPointerOffset));
4742 __ Branch(&new_object, ne, a0, Operand(a1));
4743
4744 // Allocate the JSObject on the heap.
4745 Label allocate, done_allocate;
4746 __ lbu(t0, FieldMemOperand(a2, Map::kInstanceSizeOffset));
4747 __ Allocate(t0, v0, t1, a0, &allocate, SIZE_IN_WORDS);
4748 __ bind(&done_allocate);
4749
4750 // Initialize the JSObject fields.
4751 __ sw(a2, MemOperand(v0, JSObject::kMapOffset));
4752 __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
4753 __ sw(a3, MemOperand(v0, JSObject::kPropertiesOffset));
4754 __ sw(a3, MemOperand(v0, JSObject::kElementsOffset));
4755 STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
4756 __ Addu(a1, v0, Operand(JSObject::kHeaderSize));
4757
4758 // ----------- S t a t e -------------
4759 // -- v0 : result (untagged)
4760 // -- a1 : result fields (untagged)
4761 // -- t1 : result end (untagged)
4762 // -- a2 : initial map
4763 // -- cp : context
4764 // -- ra : return address
4765 // -----------------------------------
4766
4767 // Perform in-object slack tracking if requested.
4768 Label slack_tracking;
4769 STATIC_ASSERT(Map::kNoSlackTracking == 0);
4770 __ lw(a3, FieldMemOperand(a2, Map::kBitField3Offset));
4771 __ And(at, a3, Operand(Map::ConstructionCounter::kMask));
4772 __ Branch(USE_DELAY_SLOT, &slack_tracking, ne, at, Operand(0));
4773 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); // In delay slot.
4774 {
4775 // Initialize all in-object fields with undefined.
4776 __ InitializeFieldsWithFiller(a1, t1, a0);
4777
4778 // Add the object tag to make the JSObject real.
4779 STATIC_ASSERT(kHeapObjectTag == 1);
4780 __ Ret(USE_DELAY_SLOT);
4781 __ Addu(v0, v0, Operand(kHeapObjectTag)); // In delay slot.
4782 }
4783 __ bind(&slack_tracking);
4784 {
4785 // Decrease generous allocation count.
4786 STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
4787 __ Subu(a3, a3, Operand(1 << Map::ConstructionCounter::kShift));
4788 __ sw(a3, FieldMemOperand(a2, Map::kBitField3Offset));
4789
4790 // Initialize the in-object fields with undefined.
4791 __ lbu(t0, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
4792 __ sll(t0, t0, kPointerSizeLog2);
4793 __ subu(t0, t1, t0);
4794 __ InitializeFieldsWithFiller(a1, t0, a0);
4795
4796 // Initialize the remaining (reserved) fields with one pointer filler map.
4797 __ LoadRoot(a0, Heap::kOnePointerFillerMapRootIndex);
4798 __ InitializeFieldsWithFiller(a1, t1, a0);
4799
4800 // Check if we can finalize the instance size.
4801 Label finalize;
4802 STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
4803 __ And(a3, a3, Operand(Map::ConstructionCounter::kMask));
4804 __ Branch(USE_DELAY_SLOT, &finalize, eq, a3, Operand(zero_reg));
4805 STATIC_ASSERT(kHeapObjectTag == 1);
4806 __ Addu(v0, v0, Operand(kHeapObjectTag)); // In delay slot.
4807 __ Ret();
4808
4809 // Finalize the instance size.
4810 __ bind(&finalize);
4811 {
4812 FrameScope scope(masm, StackFrame::INTERNAL);
4813 __ Push(v0, a2);
4814 __ CallRuntime(Runtime::kFinalizeInstanceSize);
4815 __ Pop(v0);
4816 }
4817 __ Ret();
4818 }
4819
4820 // Fall back to %AllocateInNewSpace.
4821 __ bind(&allocate);
4822 {
4823 FrameScope scope(masm, StackFrame::INTERNAL);
4824 STATIC_ASSERT(kSmiTag == 0);
4825 STATIC_ASSERT(kSmiTagSize == 1);
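    // t0 holds the instance size in words; convert it to a smi-tagged size in
    // bytes for Runtime::kAllocateInNewSpace.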
4826 __ sll(t0, t0, kPointerSizeLog2 + kSmiTagSize);
4827 __ Push(a2, t0);
4828 __ CallRuntime(Runtime::kAllocateInNewSpace);
4829 __ Pop(a2);
4830 }
4831 STATIC_ASSERT(kHeapObjectTag == 1);
4832 __ Subu(v0, v0, Operand(kHeapObjectTag));
4833 __ lbu(t1, FieldMemOperand(a2, Map::kInstanceSizeOffset));
4834 __ Lsa(t1, v0, t1, kPointerSizeLog2);
4835 __ jmp(&done_allocate);
4836
4837 // Fall back to %NewObject.
4838 __ bind(&new_object);
4839 __ Push(a1, a3);
4840 __ TailCallRuntime(Runtime::kNewObject);
4841}
4842
4843
4844void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
4845 // ----------- S t a t e -------------
4846 // -- a1 : function
4847 // -- cp : context
4848 // -- fp : frame pointer
4849 // -- ra : return address
4850 // -----------------------------------
4851 __ AssertFunction(a1);
4852
4853 // For Ignition we need to skip all possible handler/stub frames until
4854 // we reach the JavaScript frame for the function (similar to what the
4855 // runtime fallback implementation does). So make a2 point to that
4856 // JavaScript frame.
4857 {
4858 Label loop, loop_entry;
4859 __ Branch(USE_DELAY_SLOT, &loop_entry);
4860 __ mov(a2, fp); // In delay slot.
4861 __ bind(&loop);
4862 __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
4863 __ bind(&loop_entry);
4864    __ lw(a3, MemOperand(a2, StandardFrameConstants::kFunctionOffset));
4865    __ Branch(&loop, ne, a1, Operand(a3));
4866 }
4867
4868 // Check if we have rest parameters (only possible if we have an
4869 // arguments adaptor frame below the function frame).
4870 Label no_rest_parameters;
4871 __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
4872  __ lw(a3, MemOperand(a2, CommonFrameConstants::kContextOrFrameTypeOffset));
4873  __ Branch(&no_rest_parameters, ne, a3,
4874 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4875
4876 // Check if the arguments adaptor frame contains more arguments than
4877 // specified by the function's internal formal parameter count.
4878 Label rest_parameters;
4879 __ lw(a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4880 __ lw(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
4881 __ lw(a1,
4882 FieldMemOperand(a1, SharedFunctionInfo::kFormalParameterCountOffset));
4883 __ Subu(a0, a0, Operand(a1));
4884 __ Branch(&rest_parameters, gt, a0, Operand(zero_reg));
4885
4886 // Return an empty rest parameter array.
4887 __ bind(&no_rest_parameters);
4888 {
4889 // ----------- S t a t e -------------
4890 // -- cp : context
4891 // -- ra : return address
4892 // -----------------------------------
4893
4894 // Allocate an empty rest parameter array.
4895 Label allocate, done_allocate;
4896 __ Allocate(JSArray::kSize, v0, a0, a1, &allocate, TAG_OBJECT);
4897 __ bind(&done_allocate);
4898
4899 // Setup the rest parameter array in v0.
4900 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, a1);
4901 __ sw(a1, FieldMemOperand(v0, JSArray::kMapOffset));
4902 __ LoadRoot(a1, Heap::kEmptyFixedArrayRootIndex);
4903 __ sw(a1, FieldMemOperand(v0, JSArray::kPropertiesOffset));
4904 __ sw(a1, FieldMemOperand(v0, JSArray::kElementsOffset));
4905 __ Move(a1, Smi::FromInt(0));
4906 __ Ret(USE_DELAY_SLOT);
4907 __ sw(a1, FieldMemOperand(v0, JSArray::kLengthOffset)); // In delay slot
4908 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
4909
4910 // Fall back to %AllocateInNewSpace.
4911 __ bind(&allocate);
4912 {
4913 FrameScope scope(masm, StackFrame::INTERNAL);
4914 __ Push(Smi::FromInt(JSArray::kSize));
4915 __ CallRuntime(Runtime::kAllocateInNewSpace);
4916 }
4917 __ jmp(&done_allocate);
4918 }
4919
4920 __ bind(&rest_parameters);
4921 {
4922    // Compute the pointer to the first rest parameter (skipping the receiver).
4923 __ Lsa(a2, a2, a0, kPointerSizeLog2 - 1);
4924 __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
4925 1 * kPointerSize));
4926
4927 // ----------- S t a t e -------------
4928 // -- cp : context
4929 // -- a0 : number of rest parameters (tagged)
4930 // -- a2 : pointer to first rest parameters
4931 // -- ra : return address
4932 // -----------------------------------
4933
4934 // Allocate space for the rest parameter array plus the backing store.
4935 Label allocate, done_allocate;
4936 __ li(a1, Operand(JSArray::kSize + FixedArray::kHeaderSize));
4937 __ Lsa(a1, a1, a0, kPointerSizeLog2 - 1);
4938 __ Allocate(a1, v0, a3, t0, &allocate, TAG_OBJECT);
4939 __ bind(&done_allocate);
4940
4941 // Setup the elements array in v0.
4942 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
4943 __ sw(at, FieldMemOperand(v0, FixedArray::kMapOffset));
4944 __ sw(a0, FieldMemOperand(v0, FixedArray::kLengthOffset));
4945 __ Addu(a3, v0, Operand(FixedArray::kHeaderSize));
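      // Copy the rest parameters from the caller's frame into the FixedArray:
      // the source pointer (a2) walks down the stack while the destination
      // pointer (a3) walks up the elements array.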
4946 {
4947 Label loop, done_loop;
4948 __ sll(at, a0, kPointerSizeLog2 - 1);
4949 __ Addu(a1, a3, at);
4950 __ bind(&loop);
4951 __ Branch(&done_loop, eq, a1, Operand(a3));
4952 __ lw(at, MemOperand(a2, 0 * kPointerSize));
4953 __ sw(at, FieldMemOperand(a3, 0 * kPointerSize));
4954 __ Subu(a2, a2, Operand(1 * kPointerSize));
4955 __ Addu(a3, a3, Operand(1 * kPointerSize));
4956 __ jmp(&loop);
4957 __ bind(&done_loop);
4958 }
4959
4960 // Setup the rest parameter array in a3.
4961 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, at);
4962 __ sw(at, FieldMemOperand(a3, JSArray::kMapOffset));
4963 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
4964 __ sw(at, FieldMemOperand(a3, JSArray::kPropertiesOffset));
4965 __ sw(v0, FieldMemOperand(a3, JSArray::kElementsOffset));
4966 __ sw(a0, FieldMemOperand(a3, JSArray::kLengthOffset));
4967 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
4968 __ Ret(USE_DELAY_SLOT);
4969 __ mov(v0, a3); // In delay slot
4970
4971 // Fall back to %AllocateInNewSpace.
4972 __ bind(&allocate);
4973 {
4974 FrameScope scope(masm, StackFrame::INTERNAL);
4975 __ SmiTag(a1);
4976 __ Push(a0, a2, a1);
4977 __ CallRuntime(Runtime::kAllocateInNewSpace);
4978 __ Pop(a0, a2);
4979 }
4980 __ jmp(&done_allocate);
4981 }
4982}
4983
4984
4985void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
4986 // ----------- S t a t e -------------
4987 // -- a1 : function
4988 // -- cp : context
4989 // -- fp : frame pointer
4990 // -- ra : return address
4991 // -----------------------------------
4992 __ AssertFunction(a1);
4993
4994 // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
4995 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
4996 __ lw(a2,
4997 FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
4998 __ Lsa(a3, fp, a2, kPointerSizeLog2 - 1);
4999 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset));
5000
5001 // a1 : function
5002 // a2 : number of parameters (tagged)
5003 // a3 : parameters pointer
5004 // Registers used over whole function:
5005 // t1 : arguments count (tagged)
5006 // t2 : mapped parameter count (tagged)
5007
5008 // Check if the calling frame is an arguments adaptor frame.
5009 Label adaptor_frame, try_allocate, runtime;
5010 __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
5011  __ lw(a0, MemOperand(t0, CommonFrameConstants::kContextOrFrameTypeOffset));
5012  __ Branch(&adaptor_frame, eq, a0,
5013 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
5014
5015 // No adaptor, parameter count = argument count.
5016 __ mov(t1, a2);
5017 __ Branch(USE_DELAY_SLOT, &try_allocate);
5018 __ mov(t2, a2); // In delay slot.
5019
5020 // We have an adaptor frame. Patch the parameters pointer.
5021 __ bind(&adaptor_frame);
5022 __ lw(t1, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset));
5023 __ Lsa(t0, t0, t1, 1);
5024 __ Addu(a3, t0, Operand(StandardFrameConstants::kCallerSPOffset));
5025
5026 // t1 = argument count (tagged)
5027 // t2 = parameter count (tagged)
5028 // Compute the mapped parameter count = min(t2, t1) in t2.
5029 __ mov(t2, a2);
5030 __ Branch(&try_allocate, le, t2, Operand(t1));
5031 __ mov(t2, t1);
5032
5033 __ bind(&try_allocate);
5034
5035 // Compute the sizes of backing store, parameter map, and arguments object.
5036 // 1. Parameter map, has 2 extra words containing context and backing store.
5037 const int kParameterMapHeaderSize =
5038 FixedArray::kHeaderSize + 2 * kPointerSize;
5039 // If there are no mapped parameters, we do not need the parameter_map.
5040 Label param_map_size;
5041 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
5042 __ Branch(USE_DELAY_SLOT, &param_map_size, eq, t2, Operand(zero_reg));
5043 __ mov(t5, zero_reg); // In delay slot: param map size = 0 when t2 == 0.
5044 __ sll(t5, t2, 1);
5045 __ addiu(t5, t5, kParameterMapHeaderSize);
5046 __ bind(&param_map_size);
5047
5048 // 2. Backing store.
5049 __ Lsa(t5, t5, t1, 1);
5050 __ Addu(t5, t5, Operand(FixedArray::kHeaderSize));
5051
5052 // 3. Arguments object.
5053 __ Addu(t5, t5, Operand(JSSloppyArgumentsObject::kSize));
5054
5055 // Do the allocation of all three objects in one go.
5056 __ Allocate(t5, v0, t5, t0, &runtime, TAG_OBJECT);
5057
5058 // v0 = address of new object(s) (tagged)
5059 // a2 = argument count (smi-tagged)
5060 // Get the arguments boilerplate from the current native context into t0.
5061 const int kNormalOffset =
5062 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
5063 const int kAliasedOffset =
5064 Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
5065
5066 __ lw(t0, NativeContextMemOperand());
5067 Label skip2_ne, skip2_eq;
5068 __ Branch(&skip2_ne, ne, t2, Operand(zero_reg));
5069 __ lw(t0, MemOperand(t0, kNormalOffset));
5070 __ bind(&skip2_ne);
5071
5072 __ Branch(&skip2_eq, eq, t2, Operand(zero_reg));
5073 __ lw(t0, MemOperand(t0, kAliasedOffset));
5074 __ bind(&skip2_eq);
5075
5076 // v0 = address of new object (tagged)
5077 // a2 = argument count (smi-tagged)
5078 // t0 = address of arguments map (tagged)
5079 // t2 = mapped parameter count (tagged)
5080 __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset));
5081 __ LoadRoot(t5, Heap::kEmptyFixedArrayRootIndex);
5082 __ sw(t5, FieldMemOperand(v0, JSObject::kPropertiesOffset));
5083 __ sw(t5, FieldMemOperand(v0, JSObject::kElementsOffset));
5084
5085 // Set up the callee in-object property.
5086 __ AssertNotSmi(a1);
5087 __ sw(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset));
5088
5089 // Use the length (smi tagged) and set that as an in-object property too.
5090 __ AssertSmi(t1);
5091 __ sw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
5092
5093 // Set up the elements pointer in the allocated arguments object.
5094 // If we allocated a parameter map, t0 will point there, otherwise
5095 // it will point to the backing store.
5096 __ Addu(t0, v0, Operand(JSSloppyArgumentsObject::kSize));
5097 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
5098
5099 // v0 = address of new object (tagged)
5100 // a2 = argument count (tagged)
5101 // t0 = address of parameter map or backing store (tagged)
5102 // t2 = mapped parameter count (tagged)
5103 // Initialize parameter map. If there are no mapped arguments, we're done.
5104 Label skip_parameter_map;
5105 Label skip3;
5106 __ Branch(&skip3, ne, t2, Operand(Smi::FromInt(0)));
5107 // Move backing store address to a1, because it is
5108 // expected there when filling in the unmapped arguments.
5109 __ mov(a1, t0);
5110 __ bind(&skip3);
5111
5112 __ Branch(&skip_parameter_map, eq, t2, Operand(Smi::FromInt(0)));
5113
5114 __ LoadRoot(t1, Heap::kSloppyArgumentsElementsMapRootIndex);
5115 __ sw(t1, FieldMemOperand(t0, FixedArray::kMapOffset));
5116 __ Addu(t1, t2, Operand(Smi::FromInt(2)));
5117 __ sw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset));
5118 __ sw(cp, FieldMemOperand(t0, FixedArray::kHeaderSize + 0 * kPointerSize));
5119 __ Lsa(t1, t0, t2, 1);
5120 __ Addu(t1, t1, Operand(kParameterMapHeaderSize));
5121 __ sw(t1, FieldMemOperand(t0, FixedArray::kHeaderSize + 1 * kPointerSize));
5122
5123 // Copy the parameter slots and the holes in the arguments.
5124 // We need to fill in mapped_parameter_count slots. They index the context,
5125 // where parameters are stored in reverse order, at
5126 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
5127  // The mapped parameters thus need to get indices
5128 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
5129 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
5130 // We loop from right to left.
5131 Label parameters_loop, parameters_test;
5132 __ mov(t1, t2);
5133 __ Addu(t5, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
5134 __ Subu(t5, t5, Operand(t2));
5135 __ LoadRoot(t3, Heap::kTheHoleValueRootIndex);
5136 __ Lsa(a1, t0, t1, 1);
5137 __ Addu(a1, a1, Operand(kParameterMapHeaderSize));
5138
5139 // a1 = address of backing store (tagged)
5140 // t0 = address of parameter map (tagged)
5141  // a0 = temporary scratch (among other things, for address calculation)
5142 // t1 = loop variable (tagged)
5143 // t3 = the hole value
5144 __ jmp(&parameters_test);
5145
5146 __ bind(&parameters_loop);
5147 __ Subu(t1, t1, Operand(Smi::FromInt(1)));
5148 __ sll(a0, t1, 1);
5149 __ Addu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
5150 __ Addu(t6, t0, a0);
5151 __ sw(t5, MemOperand(t6));
5152 __ Subu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
5153 __ Addu(t6, a1, a0);
5154 __ sw(t3, MemOperand(t6));
5155 __ Addu(t5, t5, Operand(Smi::FromInt(1)));
5156 __ bind(&parameters_test);
5157 __ Branch(&parameters_loop, ne, t1, Operand(Smi::FromInt(0)));
5158
5159 // t1 = argument count (tagged).
5160 __ lw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
5161
5162 __ bind(&skip_parameter_map);
5163 // v0 = address of new object (tagged)
5164 // a1 = address of backing store (tagged)
5165 // t1 = argument count (tagged)
5166 // t2 = mapped parameter count (tagged)
5167 // t5 = scratch
5168 // Copy arguments header and remaining slots (if there are any).
5169 __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex);
5170 __ sw(t5, FieldMemOperand(a1, FixedArray::kMapOffset));
5171 __ sw(t1, FieldMemOperand(a1, FixedArray::kLengthOffset));
5172
5173 Label arguments_loop, arguments_test;
5174 __ sll(t6, t2, 1);
5175 __ Subu(a3, a3, Operand(t6));
5176 __ jmp(&arguments_test);
5177
5178 __ bind(&arguments_loop);
5179 __ Subu(a3, a3, Operand(kPointerSize));
5180 __ lw(t0, MemOperand(a3, 0));
5181 __ Lsa(t5, a1, t2, 1);
5182 __ sw(t0, FieldMemOperand(t5, FixedArray::kHeaderSize));
5183 __ Addu(t2, t2, Operand(Smi::FromInt(1)));
5184
5185 __ bind(&arguments_test);
5186 __ Branch(&arguments_loop, lt, t2, Operand(t1));
5187
5188 // Return.
5189 __ Ret();
5190
5191 // Do the runtime call to allocate the arguments object.
5192 // t1 = argument count (tagged)
5193 __ bind(&runtime);
5194 __ Push(a1, a3, t1);
5195 __ TailCallRuntime(Runtime::kNewSloppyArguments);
5196}
5197
5198
5199void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
5200 // ----------- S t a t e -------------
5201 // -- a1 : function
5202 // -- cp : context
5203 // -- fp : frame pointer
5204 // -- ra : return address
5205 // -----------------------------------
5206 __ AssertFunction(a1);
5207
5208 // For Ignition we need to skip all possible handler/stub frames until
5209 // we reach the JavaScript frame for the function (similar to what the
5210 // runtime fallback implementation does). So make a2 point to that
5211 // JavaScript frame.
5212 {
5213 Label loop, loop_entry;
5214 __ Branch(USE_DELAY_SLOT, &loop_entry);
5215 __ mov(a2, fp); // In delay slot.
5216 __ bind(&loop);
5217 __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
5218 __ bind(&loop_entry);
5219    __ lw(a3, MemOperand(a2, StandardFrameConstants::kFunctionOffset));
5220    __ Branch(&loop, ne, a1, Operand(a3));
5221 }
5222
5223 // Check if we have an arguments adaptor frame below the function frame.
5224 Label arguments_adaptor, arguments_done;
5225 __ lw(a3, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
5226  __ lw(a0, MemOperand(a3, CommonFrameConstants::kContextOrFrameTypeOffset));
5227  __ Branch(&arguments_adaptor, eq, a0,
5228 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
5229 {
5230 __ lw(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
5231 __ lw(a0,
5232 FieldMemOperand(a1, SharedFunctionInfo::kFormalParameterCountOffset));
5233 __ Lsa(a2, a2, a0, kPointerSizeLog2 - 1);
5234 __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
5235 1 * kPointerSize));
5236 }
5237 __ Branch(&arguments_done);
5238 __ bind(&arguments_adaptor);
5239 {
5240 __ lw(a0, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
5241 __ Lsa(a2, a3, a0, kPointerSizeLog2 - 1);
5242 __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
5243 1 * kPointerSize));
5244 }
5245 __ bind(&arguments_done);
5246
5247 // ----------- S t a t e -------------
5248 // -- cp : context
5249 // -- a0 : number of rest parameters (tagged)
5250 // -- a2 : pointer to first rest parameters
5251 // -- ra : return address
5252 // -----------------------------------
5253
5254 // Allocate space for the strict arguments object plus the backing store.
5255 Label allocate, done_allocate;
5256 __ li(a1, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
5257 __ Lsa(a1, a1, a0, kPointerSizeLog2 - 1);
5258 __ Allocate(a1, v0, a3, t0, &allocate, TAG_OBJECT);
5259 __ bind(&done_allocate);
5260
5261 // Setup the elements array in v0.
5262 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
5263 __ sw(at, FieldMemOperand(v0, FixedArray::kMapOffset));
5264 __ sw(a0, FieldMemOperand(v0, FixedArray::kLengthOffset));
5265 __ Addu(a3, v0, Operand(FixedArray::kHeaderSize));
5266 {
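    // Copy the arguments from the caller's frame into the FixedArray backing
    // store; the source pointer (a2) decreases while the destination pointer
    // (a3) increases.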
5267 Label loop, done_loop;
5268 __ sll(at, a0, kPointerSizeLog2 - 1);
5269 __ Addu(a1, a3, at);
5270 __ bind(&loop);
5271 __ Branch(&done_loop, eq, a1, Operand(a3));
5272 __ lw(at, MemOperand(a2, 0 * kPointerSize));
5273 __ sw(at, FieldMemOperand(a3, 0 * kPointerSize));
5274 __ Subu(a2, a2, Operand(1 * kPointerSize));
5275 __ Addu(a3, a3, Operand(1 * kPointerSize));
5276 __ Branch(&loop);
5277 __ bind(&done_loop);
5278 }
5279
5280 // Setup the strict arguments object in a3.
5281 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, at);
5282 __ sw(at, FieldMemOperand(a3, JSStrictArgumentsObject::kMapOffset));
5283 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
5284 __ sw(at, FieldMemOperand(a3, JSStrictArgumentsObject::kPropertiesOffset));
5285 __ sw(v0, FieldMemOperand(a3, JSStrictArgumentsObject::kElementsOffset));
5286 __ sw(a0, FieldMemOperand(a3, JSStrictArgumentsObject::kLengthOffset));
5287 STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
5288 __ Ret(USE_DELAY_SLOT);
5289 __ mov(v0, a3); // In delay slot
5290
5291 // Fall back to %AllocateInNewSpace.
5292 __ bind(&allocate);
5293 {
5294 FrameScope scope(masm, StackFrame::INTERNAL);
5295 __ SmiTag(a1);
5296 __ Push(a0, a2, a1);
5297 __ CallRuntime(Runtime::kAllocateInNewSpace);
5298 __ Pop(a0, a2);
5299 }
5300 __ jmp(&done_allocate);
5301}
5302
5303
5304void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
5305 Register context_reg = cp;
5306 Register slot_reg = a2;
5307 Register result_reg = v0;
5308 Label slow_case;
5309
5310 // Go up context chain to the script context.
5311 for (int i = 0; i < depth(); ++i) {
5312 __ lw(result_reg, ContextMemOperand(context_reg, Context::PREVIOUS_INDEX));
5313 context_reg = result_reg;
5314 }
5315
5316 // Load the PropertyCell value at the specified slot.
5317  __ Lsa(at, context_reg, slot_reg, kPointerSizeLog2);
5318  __ lw(result_reg, ContextMemOperand(at, 0));
5319 __ lw(result_reg, FieldMemOperand(result_reg, PropertyCell::kValueOffset));
5320
5321 // Check that value is not the_hole.
5322 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
5323 __ Branch(&slow_case, eq, result_reg, Operand(at));
5324 __ Ret();
5325
5326 // Fallback to the runtime.
5327 __ bind(&slow_case);
5328 __ SmiTag(slot_reg);
5329 __ Push(slot_reg);
5330 __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
5331}
5332
5333
5334void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
5335 Register context_reg = cp;
5336 Register slot_reg = a2;
5337 Register value_reg = a0;
5338 Register cell_reg = t0;
5339 Register cell_value_reg = t1;
5340 Register cell_details_reg = t2;
5341 Label fast_heapobject_case, fast_smi_case, slow_case;
5342
5343 if (FLAG_debug_code) {
5344 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
5345 __ Check(ne, kUnexpectedValue, value_reg, Operand(at));
5346 }
5347
5348 // Go up context chain to the script context.
5349 for (int i = 0; i < depth(); ++i) {
5350 __ lw(cell_reg, ContextMemOperand(context_reg, Context::PREVIOUS_INDEX));
5351 context_reg = cell_reg;
5352 }
5353
5354 // Load the PropertyCell at the specified slot.
5355  __ Lsa(at, context_reg, slot_reg, kPointerSizeLog2);
5356  __ lw(cell_reg, ContextMemOperand(at, 0));
5357
5358 // Load PropertyDetails for the cell (actually only the cell_type and kind).
5359 __ lw(cell_details_reg,
5360 FieldMemOperand(cell_reg, PropertyCell::kDetailsOffset));
5361 __ SmiUntag(cell_details_reg);
5362 __ And(cell_details_reg, cell_details_reg,
5363 PropertyDetails::PropertyCellTypeField::kMask |
5364 PropertyDetails::KindField::kMask |
5365 PropertyDetails::kAttributesReadOnlyMask);
5366
5367 // Check if PropertyCell holds mutable data.
5368 Label not_mutable_data;
5369 __ Branch(&not_mutable_data, ne, cell_details_reg,
5370 Operand(PropertyDetails::PropertyCellTypeField::encode(
5371 PropertyCellType::kMutable) |
5372 PropertyDetails::KindField::encode(kData)));
5373 __ JumpIfSmi(value_reg, &fast_smi_case);
5374 __ bind(&fast_heapobject_case);
5375 __ sw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
5376 __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
5377 cell_details_reg, kRAHasNotBeenSaved, kDontSaveFPRegs,
5378 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
5379 // RecordWriteField clobbers the value register, so we need to reload.
5380 __ Ret(USE_DELAY_SLOT);
5381 __ lw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
5382 __ bind(&not_mutable_data);
5383
5384 // Check if PropertyCell value matches the new value (relevant for Constant,
5385 // ConstantType and Undefined cells).
5386 Label not_same_value;
5387 __ lw(cell_value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
5388 __ Branch(&not_same_value, ne, value_reg, Operand(cell_value_reg));
5389 // Make sure the PropertyCell is not marked READ_ONLY.
5390 __ And(at, cell_details_reg, PropertyDetails::kAttributesReadOnlyMask);
5391 __ Branch(&slow_case, ne, at, Operand(zero_reg));
5392 if (FLAG_debug_code) {
5393 Label done;
5394 // This can only be true for Constant, ConstantType and Undefined cells,
5395 // because we never store the_hole via this stub.
5396 __ Branch(&done, eq, cell_details_reg,
5397 Operand(PropertyDetails::PropertyCellTypeField::encode(
5398 PropertyCellType::kConstant) |
5399 PropertyDetails::KindField::encode(kData)));
5400 __ Branch(&done, eq, cell_details_reg,
5401 Operand(PropertyDetails::PropertyCellTypeField::encode(
5402 PropertyCellType::kConstantType) |
5403 PropertyDetails::KindField::encode(kData)));
5404 __ Check(eq, kUnexpectedValue, cell_details_reg,
5405 Operand(PropertyDetails::PropertyCellTypeField::encode(
5406 PropertyCellType::kUndefined) |
5407 PropertyDetails::KindField::encode(kData)));
5408 __ bind(&done);
5409 }
5410 __ Ret();
5411 __ bind(&not_same_value);
5412
5413 // Check if PropertyCell contains data with constant type (and is not
5414 // READ_ONLY).
5415 __ Branch(&slow_case, ne, cell_details_reg,
5416 Operand(PropertyDetails::PropertyCellTypeField::encode(
5417 PropertyCellType::kConstantType) |
5418 PropertyDetails::KindField::encode(kData)));
5419
5420 // Now either both old and new values must be SMIs or both must be heap
5421 // objects with same map.
5422 Label value_is_heap_object;
5423 __ JumpIfNotSmi(value_reg, &value_is_heap_object);
5424 __ JumpIfNotSmi(cell_value_reg, &slow_case);
5425 // Old and new values are SMIs, no need for a write barrier here.
5426 __ bind(&fast_smi_case);
5427 __ Ret(USE_DELAY_SLOT);
5428 __ sw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
5429 __ bind(&value_is_heap_object);
5430 __ JumpIfSmi(cell_value_reg, &slow_case);
5431 Register cell_value_map_reg = cell_value_reg;
5432 __ lw(cell_value_map_reg,
5433 FieldMemOperand(cell_value_reg, HeapObject::kMapOffset));
5434 __ Branch(&fast_heapobject_case, eq, cell_value_map_reg,
5435 FieldMemOperand(value_reg, HeapObject::kMapOffset));
5436
5437 // Fallback to the runtime.
5438 __ bind(&slow_case);
5439 __ SmiTag(slot_reg);
5440 __ Push(slot_reg, value_reg);
5441 __ TailCallRuntime(is_strict(language_mode())
5442 ? Runtime::kStoreGlobalViaContext_Strict
5443 : Runtime::kStoreGlobalViaContext_Sloppy);
5444}
5445
5446
5447static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
5448 return ref0.address() - ref1.address();
5449}
5450
5451
5452// Calls an API function.  Allocates a HandleScope, extracts the returned
5453// value from the handle, and propagates exceptions.  Restores the context.
5454// stack_space is the space to be unwound on exit (it includes the call JS
5455// arguments space and the additional space allocated for the fast call).
5456static void CallApiFunctionAndReturn(
5457 MacroAssembler* masm, Register function_address,
5458 ExternalReference thunk_ref, int stack_space, int32_t stack_space_offset,
5459 MemOperand return_value_operand, MemOperand* context_restore_operand) {
5460 Isolate* isolate = masm->isolate();
5461 ExternalReference next_address =
5462 ExternalReference::handle_scope_next_address(isolate);
5463 const int kNextOffset = 0;
5464 const int kLimitOffset = AddressOffset(
5465 ExternalReference::handle_scope_limit_address(isolate), next_address);
5466 const int kLevelOffset = AddressOffset(
5467 ExternalReference::handle_scope_level_address(isolate), next_address);
5468
5469 DCHECK(function_address.is(a1) || function_address.is(a2));
5470
5471 Label profiler_disabled;
5472 Label end_profiler_check;
5473 __ li(t9, Operand(ExternalReference::is_profiling_address(isolate)));
5474 __ lb(t9, MemOperand(t9, 0));
5475 __ Branch(&profiler_disabled, eq, t9, Operand(zero_reg));
5476
5477 // Additional parameter is the address of the actual callback.
5478 __ li(t9, Operand(thunk_ref));
5479 __ jmp(&end_profiler_check);
5480
5481 __ bind(&profiler_disabled);
5482 __ mov(t9, function_address);
5483 __ bind(&end_profiler_check);
5484
5485 // Allocate HandleScope in callee-save registers.
5486 __ li(s3, Operand(next_address));
5487 __ lw(s0, MemOperand(s3, kNextOffset));
5488 __ lw(s1, MemOperand(s3, kLimitOffset));
5489 __ lw(s2, MemOperand(s3, kLevelOffset));
5490 __ Addu(s2, s2, Operand(1));
5491 __ sw(s2, MemOperand(s3, kLevelOffset));
5492
5493 if (FLAG_log_timer_events) {
5494 FrameScope frame(masm, StackFrame::MANUAL);
5495 __ PushSafepointRegisters();
5496 __ PrepareCallCFunction(1, a0);
5497 __ li(a0, Operand(ExternalReference::isolate_address(isolate)));
5498 __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
5499 1);
5500 __ PopSafepointRegisters();
5501 }
5502
5503 // Native call returns to the DirectCEntry stub which redirects to the
5504 // return address pushed on stack (could have moved after GC).
5505 // DirectCEntry stub itself is generated early and never moves.
5506 DirectCEntryStub stub(isolate);
5507 stub.GenerateCall(masm, t9);
5508
5509 if (FLAG_log_timer_events) {
5510 FrameScope frame(masm, StackFrame::MANUAL);
5511 __ PushSafepointRegisters();
5512 __ PrepareCallCFunction(1, a0);
5513 __ li(a0, Operand(ExternalReference::isolate_address(isolate)));
5514 __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
5515 1);
5516 __ PopSafepointRegisters();
5517 }
5518
5519 Label promote_scheduled_exception;
5520 Label delete_allocated_handles;
5521 Label leave_exit_frame;
5522 Label return_value_loaded;
5523
5524 // Load value from ReturnValue.
5525 __ lw(v0, return_value_operand);
5526 __ bind(&return_value_loaded);
5527
5528 // No more valid handles (the result handle was the last one). Restore
5529 // previous handle scope.
5530 __ sw(s0, MemOperand(s3, kNextOffset));
5531 if (__ emit_debug_code()) {
5532 __ lw(a1, MemOperand(s3, kLevelOffset));
5533 __ Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2));
5534 }
5535 __ Subu(s2, s2, Operand(1));
5536 __ sw(s2, MemOperand(s3, kLevelOffset));
5537 __ lw(at, MemOperand(s3, kLimitOffset));
5538 __ Branch(&delete_allocated_handles, ne, s1, Operand(at));
5539
5540 // Leave the API exit frame.
5541 __ bind(&leave_exit_frame);
5542
5543 bool restore_context = context_restore_operand != NULL;
5544 if (restore_context) {
5545 __ lw(cp, *context_restore_operand);
5546 }
5547 if (stack_space_offset != kInvalidStackOffset) {
5548 // ExitFrame contains four MIPS argument slots after DirectCEntryStub call
5549 // so this must be accounted for.
5550 __ lw(s0, MemOperand(sp, stack_space_offset + kCArgsSlotsSize));
5551 } else {
5552 __ li(s0, Operand(stack_space));
5553 }
5554 __ LeaveExitFrame(false, s0, !restore_context, NO_EMIT_RETURN,
5555 stack_space_offset != kInvalidStackOffset);
5556
5557 // Check if the function scheduled an exception.
5558 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
5559 __ li(at, Operand(ExternalReference::scheduled_exception_address(isolate)));
5560 __ lw(t1, MemOperand(at));
5561 __ Branch(&promote_scheduled_exception, ne, t0, Operand(t1));
5562
5563 __ Ret();
5564
5565 // Re-throw by promoting a scheduled exception.
5566 __ bind(&promote_scheduled_exception);
5567 __ TailCallRuntime(Runtime::kPromoteScheduledException);
5568
5569 // HandleScope limit has changed. Delete allocated extensions.
5570 __ bind(&delete_allocated_handles);
5571 __ sw(s1, MemOperand(s3, kLimitOffset));
5572 __ mov(s0, v0);
5573 __ mov(a0, v0);
5574 __ PrepareCallCFunction(1, s1);
5575 __ li(a0, Operand(ExternalReference::isolate_address(isolate)));
5576 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate),
5577 1);
5578 __ mov(v0, s0);
5579 __ jmp(&leave_exit_frame);
5580}
5581
5582void CallApiCallbackStub::Generate(MacroAssembler* masm) {
5583  // ----------- S t a t e -------------
5584 // -- a0 : callee
5585 // -- t0 : call_data
5586 // -- a2 : holder
5587 // -- a1 : api_function_address
5588 // -- cp : context
5589 // --
5590 // -- sp[0] : last argument
5591 // -- ...
5592 // -- sp[(argc - 1)* 4] : first argument
5593 // -- sp[argc * 4] : receiver
5594 // -----------------------------------
5595
5596 Register callee = a0;
5597 Register call_data = t0;
5598 Register holder = a2;
5599 Register api_function_address = a1;
5600 Register context = cp;
5601
5602  typedef FunctionCallbackArguments FCA;
5603
5604 STATIC_ASSERT(FCA::kContextSaveIndex == 6);
5605 STATIC_ASSERT(FCA::kCalleeIndex == 5);
5606 STATIC_ASSERT(FCA::kDataIndex == 4);
5607 STATIC_ASSERT(FCA::kReturnValueOffset == 3);
5608 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
5609 STATIC_ASSERT(FCA::kIsolateIndex == 1);
5610 STATIC_ASSERT(FCA::kHolderIndex == 0);
5611 STATIC_ASSERT(FCA::kArgsLength == 7);
5612
5613 // Save context, callee and call data.
5614 __ Push(context, callee, call_data);
5615  if (!is_lazy()) {
5616    // Load context from callee.
5617 __ lw(context, FieldMemOperand(callee, JSFunction::kContextOffset));
5618 }
5619
5620 Register scratch = call_data;
5621  if (!call_data_undefined()) {
5622    __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
5623 }
5624 // Push return value and default return value.
5625 __ Push(scratch, scratch);
5626  __ li(scratch, Operand(ExternalReference::isolate_address(masm->isolate())));
5627  // Push isolate and holder.
5628 __ Push(scratch, holder);
5629
5630 // Prepare arguments.
5631 __ mov(scratch, sp);
5632
5633 // Allocate the v8::Arguments structure in the arguments' space since
5634 // it's not controlled by GC.
5635 const int kApiStackSpace = 4;
5636
5637 FrameScope frame_scope(masm, StackFrame::MANUAL);
5638 __ EnterExitFrame(false, kApiStackSpace);
5639
5640 DCHECK(!api_function_address.is(a0) && !scratch.is(a0));
5641 // a0 = FunctionCallbackInfo&
5642 // Arguments is after the return address.
5643 __ Addu(a0, sp, Operand(1 * kPointerSize));
5644 // FunctionCallbackInfo::implicit_args_
5645 __ sw(scratch, MemOperand(a0, 0 * kPointerSize));
5646  // FunctionCallbackInfo::values_
5647 __ Addu(at, scratch, Operand((FCA::kArgsLength - 1 + argc()) * kPointerSize));
5648 __ sw(at, MemOperand(a0, 1 * kPointerSize));
5649 // FunctionCallbackInfo::length_ = argc
5650 __ li(at, Operand(argc()));
5651 __ sw(at, MemOperand(a0, 2 * kPointerSize));
5652 // FunctionCallbackInfo::is_construct_call_ = 0
5653 __ sw(zero_reg, MemOperand(a0, 3 * kPointerSize));
5654
5655  ExternalReference thunk_ref =
5656      ExternalReference::invoke_function_callback(masm->isolate());
5657
5658 AllowExternalCallThatCantCauseGC scope(masm);
5659 MemOperand context_restore_operand(
5660 fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
5661  // For stores the return value is the first JS argument (the value being stored).
5662 int return_value_offset = 0;
5663  if (is_store()) {
5664    return_value_offset = 2 + FCA::kArgsLength;
5665 } else {
5666 return_value_offset = 2 + FCA::kReturnValueOffset;
5667 }
5668 MemOperand return_value_operand(fp, return_value_offset * kPointerSize);
5669  int stack_space = 0;
5670 int32_t stack_space_offset = 4 * kPointerSize;
5671  stack_space = argc() + FCA::kArgsLength + 1;
5672 stack_space_offset = kInvalidStackOffset;
5673  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, stack_space,
5674 stack_space_offset, return_value_operand,
5675 &context_restore_operand);
5676}
5677
5678
5679void CallApiGetterStub::Generate(MacroAssembler* masm) {
5680 // ----------- S t a t e -------------
5681  // -- sp[0] : name
5682 // -- sp[4 .. (4 + kArgsLength*4)] : v8::PropertyCallbackInfo::args_
5683  // -- ...
5684  // -- a2 : api_function_address
5685  // -----------------------------------
5686
5687 Register api_function_address = ApiGetterDescriptor::function_address();
5688 DCHECK(api_function_address.is(a2));
5689
5690  // v8::PropertyCallbackInfo::args_ array and name handle.
5691 const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
5692
5693 // Load address of v8::PropertyAccessorInfo::args_ array and name handle.
5694 __ mov(a0, sp); // a0 = Handle<Name>
5695 __ Addu(a1, a0, Operand(1 * kPointerSize)); // a1 = v8::PCI::args_
5696
5697 const int kApiStackSpace = 1;
5698 FrameScope frame_scope(masm, StackFrame::MANUAL);
5699 __ EnterExitFrame(false, kApiStackSpace);
5700
5701  // Create v8::PropertyCallbackInfo object on the stack and initialize
5702  // its args_ field.
5703  __ sw(a1, MemOperand(sp, 1 * kPointerSize));
5704  __ Addu(a1, sp, Operand(1 * kPointerSize));  // a1 = v8::PropertyCallbackInfo&
5705
5706 ExternalReference thunk_ref =
5707 ExternalReference::invoke_accessor_getter_callback(isolate());
5708
5709 // +3 is to skip prolog, return address and name handle.
5710 MemOperand return_value_operand(
5711 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
5712  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
5713 kStackUnwindSpace, kInvalidStackOffset,
5714                           return_value_operand, NULL);
5715}
5716
5717
5718#undef __
5719
5720}  // namespace internal
5721} // namespace v8
5722
5723#endif // V8_TARGET_ARCH_MIPS