// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/code-stubs.h"
#include "src/api-arguments.h"
#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

#include "src/arm/code-stubs-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
  __ lsl(r5, r0, Operand(kPointerSizeLog2));
  __ str(r1, MemOperand(sp, r5));
  __ Push(r1);
  __ Push(r2);
  __ add(r0, r0, Operand(3));
  __ TailCallRuntime(Runtime::kNewArray);
}

void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
  descriptor->Initialize(r0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}

void FastFunctionBindStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
  descriptor->Initialize(r0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}

static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
                                          Condition cond);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* lhs_not_nan,
                                    Label* slow,
                                    bool strict);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs);


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           r0.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ Ret();
}


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Label out_of_range, only_low, negate, done;
  Register input_reg = source();
  Register result_reg = destination();
  DCHECK(is_truncating());

  int double_offset = offset();
  // Account for saved regs if input is sp.
  if (input_reg.is(sp)) double_offset += 3 * kPointerSize;

  Register scratch = GetRegisterThatIsNotOneOf(input_reg, result_reg);
  Register scratch_low =
      GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch);
  Register scratch_high =
      GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch, scratch_low);
  LowDwVfpRegister double_scratch = kScratchDoubleReg;

  __ Push(scratch_high, scratch_low, scratch);

  if (!skip_fastpath()) {
    // Load double input.
    __ vldr(double_scratch, MemOperand(input_reg, double_offset));
    __ vmov(scratch_low, scratch_high, double_scratch);

    // Do fast-path convert from double to int.
    __ vcvt_s32_f64(double_scratch.low(), double_scratch);
    __ vmov(result_reg, double_scratch.low());

    // If result is not saturated (0x7fffffff or 0x80000000), we are done.
    __ sub(scratch, result_reg, Operand(1));
    __ cmp(scratch, Operand(0x7ffffffe));
    __ b(lt, &done);
  } else {
    // We've already done MacroAssembler::TryFastTruncatedDoubleToILoad, so we
    // know exponent > 31, so we can skip the vcvt_s32_f64 which will saturate.
    if (double_offset == 0) {
      __ ldm(ia, input_reg, scratch_low.bit() | scratch_high.bit());
    } else {
      __ ldr(scratch_low, MemOperand(input_reg, double_offset));
      __ ldr(scratch_high, MemOperand(input_reg, double_offset + kIntSize));
    }
  }

  __ Ubfx(scratch, scratch_high,
          HeapNumber::kExponentShift, HeapNumber::kExponentBits);
  // Load scratch with exponent - 1. This is faster than loading
  // with exponent because Bias + 1 = 1024 which is an *ARM* immediate value.
  STATIC_ASSERT(HeapNumber::kExponentBias + 1 == 1024);
  __ sub(scratch, scratch, Operand(HeapNumber::kExponentBias + 1));
  // If the exponent is greater than or equal to 84, the 32 least significant
  // bits of the integer are all zero (84 = 52 mantissa bits + 32 low bits,
  // with the implicit 1 above them), so the result is 0.
  // Compare exponent with 84 (compare exponent - 1 with 83).
  __ cmp(scratch, Operand(83));
  __ b(ge, &out_of_range);

  // If we reach this code, 31 <= exponent <= 83.
  // So, we don't have to handle cases where 0 <= exponent <= 20 for
  // which we would need to shift right the high part of the mantissa.
  // Scratch contains exponent - 1.
  // Load scratch with 52 - exponent (load with 51 - (exponent - 1)).
  __ rsb(scratch, scratch, Operand(51), SetCC);
  __ b(ls, &only_low);
  // 21 <= exponent <= 51, shift scratch_low and scratch_high
  // to generate the result.
  __ mov(scratch_low, Operand(scratch_low, LSR, scratch));
  // Scratch contains: 52 - exponent.
  // We need: exponent - 20.
  // So we use: 32 - scratch = 32 - 52 + exponent = exponent - 20.
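  // Editorial note (illustrative example, not part of the original comments):
  // for exponent == 33 the rsb above left scratch == 52 - 33 == 19, so
  // scratch_low has just been shifted right by 19 to drop the truncated
  // fraction, and the rsb below turns scratch into 32 - 19 == 13 ==
  // exponent - 20, the left-shift applied to the high mantissa word when it
  // is or'ed into the result.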
  __ rsb(scratch, scratch, Operand(32));
  __ Ubfx(result_reg, scratch_high,
          0, HeapNumber::kMantissaBitsInTopWord);
  // Set the implicit 1 before the mantissa part in scratch_high.
  __ orr(result_reg, result_reg,
         Operand(1 << HeapNumber::kMantissaBitsInTopWord));
  __ orr(result_reg, scratch_low, Operand(result_reg, LSL, scratch));
  __ b(&negate);

  __ bind(&out_of_range);
  __ mov(result_reg, Operand::Zero());
  __ b(&done);

  __ bind(&only_low);
  // 52 <= exponent <= 83, shift only scratch_low.
  // On entry, scratch contains: 52 - exponent.
  __ rsb(scratch, scratch, Operand::Zero());
  __ mov(result_reg, Operand(scratch_low, LSL, scratch));

  __ bind(&negate);
  // If input was positive, scratch_high ASR 31 equals 0 and
  // scratch_high LSR 31 equals zero.
  // New result = (result eor 0) + 0 = result.
  // If the input was negative, we have to negate the result.
  // Input_high ASR 31 equals 0xffffffff and scratch_high LSR 31 equals 1.
  // New result = (result eor 0xffffffff) + 1 = 0 - result.
  __ eor(result_reg, result_reg, Operand(scratch_high, ASR, 31));
  __ add(result_reg, result_reg, Operand(scratch_high, LSR, 31));
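  // Editorial note (illustrative example): if the unsigned magnitude assembled
  // above is 5 and the input was negative, scratch_high has its sign bit set,
  // so the eor/add pair computes (5 eor 0xffffffff) + 1 == -5 in two's
  // complement; for a positive input both operands are 0 and the result is
  // left unchanged.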

  __ bind(&done);

  __ Pop(scratch_high, scratch_low, scratch);
  __ Ret();
}


// Handle the case where the lhs and rhs are the same object.
// Equality is almost reflexive (everything but NaN), so this is a test
// for "identity and not NaN".
static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
                                          Condition cond) {
  Label not_identical;
  Label heap_number, return_equal;
  __ cmp(r0, r1);
  __ b(ne, &not_identical);

  // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
  // so we do the second best thing - test it ourselves.
  // They are both equal and they are not both Smis so both of them are not
  // Smis. If it's not a heap number, then return equal.
  if (cond == lt || cond == gt) {
    // Call runtime on identical JSObjects.
    __ CompareObjectType(r0, r4, r4, FIRST_JS_RECEIVER_TYPE);
    __ b(ge, slow);
    // Call runtime on identical symbols since we need to throw a TypeError.
    __ cmp(r4, Operand(SYMBOL_TYPE));
    __ b(eq, slow);
    // Call runtime on identical SIMD values since we must throw a TypeError.
    __ cmp(r4, Operand(SIMD128_VALUE_TYPE));
    __ b(eq, slow);
  } else {
    __ CompareObjectType(r0, r4, r4, HEAP_NUMBER_TYPE);
    __ b(eq, &heap_number);
    // Comparing JS objects with <=, >= is complicated.
    if (cond != eq) {
      __ cmp(r4, Operand(FIRST_JS_RECEIVER_TYPE));
      __ b(ge, slow);
      // Call runtime on identical symbols since we need to throw a TypeError.
      __ cmp(r4, Operand(SYMBOL_TYPE));
      __ b(eq, slow);
      // Call runtime on identical SIMD values since we must throw a TypeError.
      __ cmp(r4, Operand(SIMD128_VALUE_TYPE));
      __ b(eq, slow);
      // Normally here we fall through to return_equal, but undefined is
      // special: (undefined == undefined) == true, but
      // (undefined <= undefined) == false!  See ECMAScript 11.8.5.
      if (cond == le || cond == ge) {
        __ cmp(r4, Operand(ODDBALL_TYPE));
        __ b(ne, &return_equal);
        __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
        __ cmp(r0, r2);
        __ b(ne, &return_equal);
        if (cond == le) {
          // undefined <= undefined should fail.
          __ mov(r0, Operand(GREATER));
        } else {
          // undefined >= undefined should fail.
          __ mov(r0, Operand(LESS));
        }
        __ Ret();
      }
    }
  }

  __ bind(&return_equal);
  if (cond == lt) {
    __ mov(r0, Operand(GREATER));  // Things aren't less than themselves.
  } else if (cond == gt) {
    __ mov(r0, Operand(LESS));     // Things aren't greater than themselves.
  } else {
    __ mov(r0, Operand(EQUAL));    // Things are <=, >=, ==, === themselves.
  }
  __ Ret();

  // For less and greater we don't have to check for NaN since the result of
  // x < x is false regardless.  For the others here is some code to check
  // for NaN.
  if (cond != lt && cond != gt) {
    __ bind(&heap_number);
    // It is a heap number, so return non-equal if it's NaN and equal if it's
    // not NaN.

    // The representation of NaN values has all exponent bits (52..62) set,
    // and not all mantissa bits (0..51) clear.
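    // Editorial note (illustrative example): a quiet NaN such as
    // 0x7ff8000000000000 has all eleven exponent bits set, so the Sbfx below
    // sign-extends the exponent field to -1; +Infinity (0x7ff0000000000000)
    // also has an all-ones exponent but a zero mantissa, which is what the
    // mantissa test further down uses to tell the two apart.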
    // Read top bits of double representation (second word of value).
    __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
    // Test that exponent bits are all set.
    __ Sbfx(r3, r2, HeapNumber::kExponentShift, HeapNumber::kExponentBits);
    // NaNs have all-one exponents so they sign extend to -1.
    __ cmp(r3, Operand(-1));
    __ b(ne, &return_equal);

    // Shift out flag and all exponent bits, retaining only mantissa.
    __ mov(r2, Operand(r2, LSL, HeapNumber::kNonMantissaBitsInTopWord));
    // Or with all low-bits of mantissa.
    __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
    __ orr(r0, r3, Operand(r2), SetCC);
    // For equal we already have the right value in r0:  Return zero (equal)
    // if all bits in mantissa are zero (it's an Infinity) and non-zero if
    // not (it's a NaN).  For <= and >= we need to load r0 with the failing
    // value if it's a NaN.
    if (cond != eq) {
      // All-zero means Infinity means equal.
      __ Ret(eq);
      if (cond == le) {
        __ mov(r0, Operand(GREATER));  // NaN <= NaN should fail.
      } else {
        __ mov(r0, Operand(LESS));     // NaN >= NaN should fail.
      }
    }
    __ Ret();
  }
  // No fall through here.

  __ bind(&not_identical);
}


// See comment at call site.
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* lhs_not_nan,
                                    Label* slow,
                                    bool strict) {
  DCHECK((lhs.is(r0) && rhs.is(r1)) ||
         (lhs.is(r1) && rhs.is(r0)));

  Label rhs_is_smi;
  __ JumpIfSmi(rhs, &rhs_is_smi);

  // Lhs is a Smi.  Check whether the rhs is a heap number.
  __ CompareObjectType(rhs, r4, r4, HEAP_NUMBER_TYPE);
  if (strict) {
    // If rhs is not a number and lhs is a Smi then strict equality cannot
    // succeed.  Return non-equal.
    // If rhs is r0 then there is already a non zero value in it.
    if (!rhs.is(r0)) {
      __ mov(r0, Operand(NOT_EQUAL), LeaveCC, ne);
    }
    __ Ret(ne);
  } else {
    // Smi compared non-strictly with a non-Smi non-heap-number.  Call
    // the runtime.
    __ b(ne, slow);
  }

  // Lhs is a smi, rhs is a number.
  // Convert lhs to a double in d7.
  __ SmiToDouble(d7, lhs);
  // Load the double from rhs, tagged HeapNumber r0, to d6.
  __ vldr(d6, rhs, HeapNumber::kValueOffset - kHeapObjectTag);

  // We now have both loaded as doubles but we can skip the lhs nan check
  // since it's a smi.
  __ jmp(lhs_not_nan);

  __ bind(&rhs_is_smi);
  // Rhs is a smi.  Check whether the non-smi lhs is a heap number.
  __ CompareObjectType(lhs, r4, r4, HEAP_NUMBER_TYPE);
  if (strict) {
    // If lhs is not a number and rhs is a smi then strict equality cannot
    // succeed.  Return non-equal.
    // If lhs is r0 then there is already a non zero value in it.
    if (!lhs.is(r0)) {
      __ mov(r0, Operand(NOT_EQUAL), LeaveCC, ne);
    }
    __ Ret(ne);
  } else {
    // Smi compared non-strictly with a non-smi non-heap-number.  Call
    // the runtime.
    __ b(ne, slow);
  }

  // Rhs is a smi, lhs is a heap number.
  // Load the double from lhs, tagged HeapNumber r1, to d7.
  __ vldr(d7, lhs, HeapNumber::kValueOffset - kHeapObjectTag);
  // Convert rhs to a double in d6.
  __ SmiToDouble(d6, rhs);
  // Fall through to both_loaded_as_doubles.
}


// See comment at call site.
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs) {
  DCHECK((lhs.is(r0) && rhs.is(r1)) ||
         (lhs.is(r1) && rhs.is(r0)));

  // If either operand is a JS object or an oddball value, then they are
  // not equal since their pointers are different.
  // There is no test for undetectability in strict equality.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  Label first_non_object;
  // Get the type of the first operand into r2 and compare it with
  // FIRST_JS_RECEIVER_TYPE.
  __ CompareObjectType(rhs, r2, r2, FIRST_JS_RECEIVER_TYPE);
  __ b(lt, &first_non_object);

  // Return non-zero (r0 is not zero)
  Label return_not_equal;
  __ bind(&return_not_equal);
  __ Ret();

  __ bind(&first_non_object);
  // Check for oddballs: true, false, null, undefined.
  __ cmp(r2, Operand(ODDBALL_TYPE));
  __ b(eq, &return_not_equal);

  __ CompareObjectType(lhs, r3, r3, FIRST_JS_RECEIVER_TYPE);
  __ b(ge, &return_not_equal);

  // Check for oddballs: true, false, null, undefined.
  __ cmp(r3, Operand(ODDBALL_TYPE));
  __ b(eq, &return_not_equal);

  // Now that we have the types we might as well check for
  // internalized-internalized.
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ orr(r2, r2, Operand(r3));
  __ tst(r2, Operand(kIsNotStringMask | kIsNotInternalizedMask));
  __ b(eq, &return_not_equal);
}


// See comment at call site.
static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
                                       Register lhs,
                                       Register rhs,
                                       Label* both_loaded_as_doubles,
                                       Label* not_heap_numbers,
                                       Label* slow) {
  DCHECK((lhs.is(r0) && rhs.is(r1)) ||
         (lhs.is(r1) && rhs.is(r0)));

  __ CompareObjectType(rhs, r3, r2, HEAP_NUMBER_TYPE);
  __ b(ne, not_heap_numbers);
  __ ldr(r2, FieldMemOperand(lhs, HeapObject::kMapOffset));
  __ cmp(r2, r3);
  __ b(ne, slow);  // First was a heap number, second wasn't.  Go slow case.

  // Both are heap numbers.  Load them up then jump to the code we have
  // for that.
  __ vldr(d6, rhs, HeapNumber::kValueOffset - kHeapObjectTag);
  __ vldr(d7, lhs, HeapNumber::kValueOffset - kHeapObjectTag);
  __ jmp(both_loaded_as_doubles);
}


// Fast negative check for internalized-to-internalized equality or receiver
// equality. Also handles the undetectable receiver to null/undefined
// comparison.
static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
                                                     Register lhs, Register rhs,
                                                     Label* possible_strings,
                                                     Label* runtime_call) {
  DCHECK((lhs.is(r0) && rhs.is(r1)) ||
         (lhs.is(r1) && rhs.is(r0)));

  // r2 is object type of rhs.
  Label object_test, return_equal, return_unequal, undetectable;
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ tst(r2, Operand(kIsNotStringMask));
  __ b(ne, &object_test);
  __ tst(r2, Operand(kIsNotInternalizedMask));
  __ b(ne, possible_strings);
  __ CompareObjectType(lhs, r3, r3, FIRST_NONSTRING_TYPE);
  __ b(ge, runtime_call);
  __ tst(r3, Operand(kIsNotInternalizedMask));
  __ b(ne, possible_strings);

  // Both are internalized. We already checked they weren't the same pointer so
  // they are not equal. Return non-equal by returning the non-zero object
  // pointer in r0.
  __ Ret();

  __ bind(&object_test);
  __ ldr(r2, FieldMemOperand(lhs, HeapObject::kMapOffset));
  __ ldr(r3, FieldMemOperand(rhs, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ ldrb(r5, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsUndetectable));
  __ b(ne, &undetectable);
  __ tst(r5, Operand(1 << Map::kIsUndetectable));
  __ b(ne, &return_unequal);

  __ CompareInstanceType(r2, r2, FIRST_JS_RECEIVER_TYPE);
  __ b(lt, runtime_call);
  __ CompareInstanceType(r3, r3, FIRST_JS_RECEIVER_TYPE);
  __ b(lt, runtime_call);

  __ bind(&return_unequal);
  // Return non-equal by returning the non-zero object pointer in r0.
  __ Ret();

  __ bind(&undetectable);
  __ tst(r5, Operand(1 << Map::kIsUndetectable));
  __ b(eq, &return_unequal);

  // If both sides are JSReceivers, then the result is false according to
  // the HTML specification, which says that only comparisons with null or
  // undefined are affected by special casing for document.all.
  __ CompareInstanceType(r2, r2, ODDBALL_TYPE);
  __ b(eq, &return_equal);
  __ CompareInstanceType(r3, r3, ODDBALL_TYPE);
  __ b(ne, &return_unequal);

  __ bind(&return_equal);
  __ mov(r0, Operand(EQUAL));
  __ Ret();
}


static void CompareICStub_CheckInputType(MacroAssembler* masm, Register input,
                                         Register scratch,
                                         CompareICState::State expected,
                                         Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail,
                DONT_DO_SMI_CHECK);
  }
  // We could be strict about internalized/non-internalized here, but as long as
  // hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


// On entry r1 and r0 are the values to be compared.
// On exit r0 is 0, positive or negative to indicate the result of
// the comparison.
void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Register lhs = r1;
  Register rhs = r0;
  Condition cc = GetCondition();

  Label miss;
  CompareICStub_CheckInputType(masm, lhs, r2, left(), &miss);
  CompareICStub_CheckInputType(masm, rhs, r3, right(), &miss);

  Label slow;  // Call builtin.
  Label not_smis, both_loaded_as_doubles, lhs_not_nan;

  Label not_two_smis, smi_done;
  __ orr(r2, r1, r0);
  __ JumpIfNotSmi(r2, &not_two_smis);
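  // Editorial note (illustrative): both values are smis here, i.e. integers
  // shifted left by one with a clear tag bit, so the two ASR-by-1 operations
  // below untag them and the sub leaves the comparison result in r0, e.g.
  // Smi(3) vs Smi(7) yields 3 - 7 == -4 (negative means "less").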
  __ mov(r1, Operand(r1, ASR, 1));
  __ sub(r0, r1, Operand(r0, ASR, 1));
  __ Ret();
  __ bind(&not_two_smis);

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Handle the case where the objects are identical.  Either returns the answer
  // or goes to slow.  Only falls through if the objects were not identical.
  EmitIdenticalObjectComparison(masm, &slow, cc);

  // If either is a Smi (we know that not both are), then they can only
  // be strictly equal if the other is a HeapNumber.
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
  __ and_(r2, lhs, Operand(rhs));
  __ JumpIfNotSmi(r2, &not_smis);
  // One operand is a smi.  EmitSmiNonsmiComparison generates code that can:
  // 1) Return the answer.
  // 2) Go to slow.
  // 3) Fall through to both_loaded_as_doubles.
  // 4) Jump to lhs_not_nan.
  // In cases 3 and 4 we have found out we were dealing with a number-number
  // comparison.  If VFP3 is supported the double values of the numbers have
  // been loaded into d7 and d6.  Otherwise, the double values have been loaded
  // into r0, r1, r2, and r3.
  EmitSmiNonsmiComparison(masm, lhs, rhs, &lhs_not_nan, &slow, strict());

  __ bind(&both_loaded_as_doubles);
  // The arguments have been converted to doubles and stored in d6 and d7, if
  // VFP3 is supported, or in r0, r1, r2, and r3.
  __ bind(&lhs_not_nan);
  Label no_nan;
  // ARMv7 VFP3 instructions to implement double precision comparison.
  __ VFPCompareAndSetFlags(d7, d6);
  Label nan;
  __ b(vs, &nan);
  __ mov(r0, Operand(EQUAL), LeaveCC, eq);
  __ mov(r0, Operand(LESS), LeaveCC, lt);
  __ mov(r0, Operand(GREATER), LeaveCC, gt);
  __ Ret();

  __ bind(&nan);
  // If one of the sides was a NaN then the v flag is set.  Load r0 with
  // whatever it takes to make the comparison fail, since comparisons with NaN
  // always fail.
  if (cc == lt || cc == le) {
    __ mov(r0, Operand(GREATER));
  } else {
    __ mov(r0, Operand(LESS));
  }
  __ Ret();

  __ bind(&not_smis);
  // At this point we know we are dealing with two different objects,
  // and neither of them is a Smi.  The objects are in rhs_ and lhs_.
  if (strict()) {
    // This returns non-equal for some object types, or falls through if it
    // was not lucky.
    EmitStrictTwoHeapObjectCompare(masm, lhs, rhs);
  }

  Label check_for_internalized_strings;
  Label flat_string_check;
  // Check for heap-number-heap-number comparison.  Can jump to slow case,
  // or load both doubles into r0, r1, r2, r3 and jump to the code that handles
  // that case.  If the inputs are not doubles then jumps to
  // check_for_internalized_strings.
  // In this case r2 will contain the type of rhs_.  Never falls through.
  EmitCheckForTwoHeapNumbers(masm,
                             lhs,
                             rhs,
                             &both_loaded_as_doubles,
                             &check_for_internalized_strings,
                             &flat_string_check);

  __ bind(&check_for_internalized_strings);
  // In the strict case the EmitStrictTwoHeapObjectCompare already took care of
  // internalized strings.
  if (cc == eq && !strict()) {
    // Returns an answer for two internalized strings or two detectable objects.
    // Otherwise jumps to string case or not both strings case.
    // Assumes that r2 is the type of rhs_ on entry.
    EmitCheckForInternalizedStringsOrObjects(
        masm, lhs, rhs, &flat_string_check, &slow);
  }

  // Check for both being sequential one-byte strings,
  // and inline if that is the case.
  __ bind(&flat_string_check);

  __ JumpIfNonSmisNotBothSequentialOneByteStrings(lhs, rhs, r2, r3, &slow);

  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r2,
                      r3);
  if (cc == eq) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, lhs, rhs, r2, r3, r4);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, r2, r3, r4,
                                                    r5);
  }
  // Never falls through to here.

  __ bind(&slow);

  if (cc == eq) {
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(lhs, rhs);
      __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
    }
    // Turn true into 0 and false into some non-zero value.
    STATIC_ASSERT(EQUAL == 0);
    __ LoadRoot(r1, Heap::kTrueValueRootIndex);
    __ sub(r0, r0, r1);
    __ Ret();
  } else {
    __ Push(lhs, rhs);
    int ncr;  // NaN compare result
    if (cc == lt || cc == le) {
      ncr = GREATER;
    } else {
      DCHECK(cc == gt || cc == ge);  // remaining cases
      ncr = LESS;
    }
    __ mov(r0, Operand(Smi::FromInt(ncr)));
    __ push(r0);

    // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
    // tagged as a small integer.
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ stm(db_w, sp, kCallerSaved | lr.bit());

  const Register scratch = r1;

  if (save_doubles()) {
    __ SaveFPRegs(sp, scratch);
  }
  const int argument_count = 1;
  const int fp_argument_count = 0;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
  __ mov(r0, Operand(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    __ RestoreFPRegs(sp, scratch);
  }
  __ ldm(ia_w, sp, kCallerSaved | pc.bit());  // Also pop pc to get Ret(0).
}


void MathPowStub::Generate(MacroAssembler* masm) {
  const Register base = r1;
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(r2));
  const Register heapnumbermap = r5;
  const Register heapnumber = r0;
  const DwVfpRegister double_base = d0;
  const DwVfpRegister double_exponent = d1;
  const DwVfpRegister double_result = d2;
  const DwVfpRegister double_scratch = d3;
  const SwVfpRegister single_scratch = s6;
  const Register scratch = r9;
  const Register scratch2 = r4;

  Label call_runtime, done, int_exponent;
  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack to double registers.
    __ ldr(base, MemOperand(sp, 1 * kPointerSize));
    __ ldr(exponent, MemOperand(sp, 0 * kPointerSize));

    __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);

    __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
    __ ldr(scratch, FieldMemOperand(base, JSObject::kMapOffset));
    __ cmp(scratch, heapnumbermap);
    __ b(ne, &call_runtime);

    __ vldr(double_base, FieldMemOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent);

    __ bind(&base_is_smi);
    __ vmov(single_scratch, scratch);
    __ vcvt_f64_s32(double_base, single_scratch);
    __ bind(&unpack_exponent);

    __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);

    __ ldr(scratch, FieldMemOperand(exponent, JSObject::kMapOffset));
    __ cmp(scratch, heapnumbermap);
    __ b(ne, &call_runtime);
    __ vldr(double_exponent,
            FieldMemOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    // Base is already in double_base.
    __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);

    __ vldr(double_exponent,
            FieldMemOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label int_exponent_convert;
    // Detect integer exponents stored as double.
    __ vcvt_u32_f64(single_scratch, double_exponent);
    // We do not check for NaN or Infinity here because comparing numbers on
    // ARM correctly distinguishes NaNs.  We end up calling the built-in.
    __ vcvt_f64_u32(double_scratch, single_scratch);
    __ VFPCompareAndSetFlags(double_scratch, double_exponent);
    __ b(eq, &int_exponent_convert);

    if (exponent_type() == ON_STACK) {
      // Detect square root case.  Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead.  We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label not_plus_half;

      // Test for 0.5.
      __ vmov(double_scratch, 0.5, scratch);
      __ VFPCompareAndSetFlags(double_exponent, double_scratch);
      __ b(ne, &not_plus_half);

      // Calculates square root of base.  Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      __ vmov(double_scratch, -V8_INFINITY, scratch);
      __ VFPCompareAndSetFlags(double_base, double_scratch);
      __ vneg(double_result, double_scratch, eq);
      __ b(eq, &done);

      // Add +0 to convert -0 to +0.
      __ vadd(double_scratch, double_base, kDoubleRegZero);
      __ vsqrt(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&not_plus_half);
      __ vmov(double_scratch, -0.5, scratch);
      __ VFPCompareAndSetFlags(double_exponent, double_scratch);
      __ b(ne, &call_runtime);

      // Calculates square root of base.  Check for the special case of
      // Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      __ vmov(double_scratch, -V8_INFINITY, scratch);
      __ VFPCompareAndSetFlags(double_base, double_scratch);
      __ vmov(double_result, kDoubleRegZero, eq);
      __ b(eq, &done);

      // Add +0 to convert -0 to +0.
      __ vadd(double_scratch, double_base, kDoubleRegZero);
      __ vmov(double_result, 1.0, scratch);
      __ vsqrt(double_scratch, double_scratch);
      __ vdiv(double_result, double_result, double_scratch);
      __ jmp(&done);
    }

    __ push(lr);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(0, 2, scratch);
      __ MovToFloatParameters(double_base, double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()),
          0, 2);
    }
    __ pop(lr);
    __ MovFromFloatResult(double_result);
    __ jmp(&done);

    __ bind(&int_exponent_convert);
    __ vcvt_u32_f64(single_scratch, double_exponent);
    __ vmov(scratch, single_scratch);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);

  // Get two copies of exponent in the registers scratch and exponent.
  if (exponent_type() == INTEGER) {
    __ mov(scratch, exponent);
  } else {
    // Exponent has previously been stored into scratch as untagged integer.
    __ mov(exponent, scratch);
  }
  __ vmov(double_scratch, double_base);  // Back up base.
  __ vmov(double_result, 1.0, scratch2);

  // Get absolute value of exponent.
  __ cmp(scratch, Operand::Zero());
  __ mov(scratch2, Operand::Zero(), LeaveCC, mi);
  __ sub(scratch, scratch2, scratch, LeaveCC, mi);
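  // Editorial note (illustrative): the loop below is square-and-multiply over
  // the bits of |exponent|.  For exponent 5 (binary 101) it multiplies the
  // result by base for bit 0, squares base, skips bit 1, squares again and
  // multiplies by base^4 for bit 2, giving base^5 after three iterations.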

  Label while_true;
  __ bind(&while_true);
  __ mov(scratch, Operand(scratch, ASR, 1), SetCC);
  __ vmul(double_result, double_result, double_scratch, cs);
  __ vmul(double_scratch, double_scratch, double_scratch, ne);
  __ b(ne, &while_true);

  __ cmp(exponent, Operand::Zero());
  __ b(ge, &done);
  __ vmov(double_scratch, 1.0, scratch);
  __ vdiv(double_result, double_scratch, double_result);
  // Test whether result is zero.  Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ VFPCompareAndSetFlags(double_result, 0.0);
  __ b(ne, &done);
  // double_exponent may not contain the exponent value if the input was a
  // smi.  We set it with exponent value before bailing out.
  __ vmov(single_scratch, exponent);
  __ vcvt_f64_s32(double_exponent, single_scratch);

  // Returning or bailing out.
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in exponent.
    __ bind(&done);
    __ AllocateHeapNumber(
        heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
    __ vstr(double_result,
            FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
    DCHECK(heapnumber.is(r0));
    __ Ret(2);
  } else {
    __ push(lr);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(0, 2, scratch);
      __ MovToFloatParameters(double_base, double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()),
          0, 2);
    }
    __ pop(lr);
    __ MovFromFloatResult(double_result);

    __ bind(&done);
    __ Ret();
  }
}


bool CEntryStub::NeedsImmovableCode() {
  return true;
}


void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
  StoreFastElementStub::GenerateAheadOfTime(isolate);
  TypeofStub::GenerateAheadOfTime(isolate);
}


void CodeStub::GenerateFPStubs(Isolate* isolate) {
  // Generate if not already in cache.
  SaveFPRegsMode mode = kSaveFPRegs;
  CEntryStub(isolate, 1, mode).GetCode();
  StoreBufferOverflowStub(isolate, mode).GetCode();
  isolate->set_fp_stubs_generated(true);
}


void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
  stub.GetCode();
}


void CEntryStub::Generate(MacroAssembler* masm) {
  // Called from JavaScript; parameters are on stack as if calling JS function.
  // r0: number of arguments including receiver
  // r1: pointer to builtin function
  // fp: frame pointer  (restored after C call)
  // sp: stack pointer  (restored as callee's sp after C call)
  // cp: current context  (C callee-saved)
  //
  // If argv_in_register():
  // r2: pointer to the first argument
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  __ mov(r5, Operand(r1));

  if (argv_in_register()) {
    // Move argv into the correct register.
    __ mov(r1, Operand(r2));
  } else {
    // Compute the argv pointer in a callee-saved register.
    __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ sub(r1, r1, Operand(kPointerSize));
  }

  // Enter the exit frame that transitions from JavaScript to C++.
  FrameScope scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(save_doubles());

  // Store a copy of argc in callee-saved registers for later.
  __ mov(r4, Operand(r0));

  // r0, r4: number of arguments including receiver  (C callee-saved)
  // r1: pointer to the first argument (C callee-saved)
  // r5: pointer to builtin function  (C callee-saved)

  int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
#if V8_HOST_ARCH_ARM
  if (FLAG_debug_code) {
    if (frame_alignment > kPointerSize) {
      Label alignment_as_expected;
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      __ tst(sp, Operand(frame_alignment_mask));
      __ b(eq, &alignment_as_expected);
      // Don't use Check here, as it will call Runtime_Abort re-entering here.
      __ stop("Unexpected alignment");
      __ bind(&alignment_as_expected);
    }
  }
#endif

  // Call C built-in.
  int result_stack_size;
  if (result_size() <= 2) {
    // r0 = argc, r1 = argv, r2 = isolate
    __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
    result_stack_size = 0;
  } else {
    DCHECK_EQ(3, result_size());
    // Allocate additional space for the result.
    result_stack_size =
        ((result_size() * kPointerSize) + frame_alignment_mask) &
        ~frame_alignment_mask;
    __ sub(sp, sp, Operand(result_stack_size));

    // r0 = hidden result argument, r1 = argc, r2 = argv, r3 = isolate.
    __ mov(r3, Operand(ExternalReference::isolate_address(isolate())));
    __ mov(r2, Operand(r1));
    __ mov(r1, Operand(r0));
    __ mov(r0, Operand(sp));
  }

  // To let the GC traverse the return address of the exit frames, we need to
  // know where the return address is. The CEntryStub is unmovable, so
  // we can store the address on the stack to be able to find it again and
  // we never have to restore it, because it will not change.
  // Compute the return address in lr to return to after the jump below. Pc is
  // already at '+ 8' from the current instruction but return is after three
  // instructions so add another 4 to pc to get the return address.
  {
    // Prevent literal pool emission before return address.
    Assembler::BlockConstPoolScope block_const_pool(masm);
    __ add(lr, pc, Operand(4));
    __ str(lr, MemOperand(sp, result_stack_size));
    __ Call(r5);
  }
  if (result_size() > 2) {
    DCHECK_EQ(3, result_size());
    // Read result values stored on stack.
    __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
    __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
  }
  // Result returned in r0, r1:r0 or r2:r1:r0 - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ CompareRoot(r0, Heap::kExceptionRootIndex);
  __ b(eq, &exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    ExternalReference pending_exception_address(
        Isolate::kPendingExceptionAddress, isolate());
    __ mov(r3, Operand(pending_exception_address));
    __ ldr(r3, MemOperand(r3));
    __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
    // Cannot use check here as it attempts to generate call into runtime.
    __ b(eq, &okay);
    __ stop("Unexpected pending exception");
    __ bind(&okay);
  }

  // Exit C frame and return.
  // r0:r1: result
  // sp: stack pointer
  // fp: frame pointer
  Register argc;
  if (argv_in_register()) {
    // We don't want to pop arguments so set argc to no_reg.
    argc = no_reg;
  } else {
    // Callee-saved register r4 still holds argc.
    argc = r4;
  }
  __ LeaveExitFrame(save_doubles(), argc, true);
  __ mov(pc, lr);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address(
      Isolate::kPendingHandlerContextAddress, isolate());
  ExternalReference pending_handler_code_address(
      Isolate::kPendingHandlerCodeAddress, isolate());
  ExternalReference pending_handler_offset_address(
      Isolate::kPendingHandlerOffsetAddress, isolate());
  ExternalReference pending_handler_fp_address(
      Isolate::kPendingHandlerFPAddress, isolate());
  ExternalReference pending_handler_sp_address(
      Isolate::kPendingHandlerSPAddress, isolate());

  // Ask the runtime for help to determine the handler. This will set r0 to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
                                 isolate());
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, 0, r0);
    __ mov(r0, Operand(0));
    __ mov(r1, Operand(0));
    __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ mov(cp, Operand(pending_handler_context_address));
  __ ldr(cp, MemOperand(cp));
  __ mov(sp, Operand(pending_handler_sp_address));
  __ ldr(sp, MemOperand(sp));
  __ mov(fp, Operand(pending_handler_fp_address));
  __ ldr(fp, MemOperand(fp));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (cp == 0) for non-JS frames.
  __ cmp(cp, Operand(0));
  __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);

  // Compute the handler entry address and jump to it.
  ConstantPoolUnavailableScope constant_pool_unavailable(masm);
  __ mov(r1, Operand(pending_handler_code_address));
  __ ldr(r1, MemOperand(r1));
  __ mov(r2, Operand(pending_handler_offset_address));
  __ ldr(r2, MemOperand(r2));
  __ add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start
  if (FLAG_enable_embedded_constant_pool) {
    __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r1);
  }
  __ add(pc, r1, r2);
}
1109
1110
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001111void JSEntryStub::Generate(MacroAssembler* masm) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001112 // r0: code entry
1113 // r1: function
1114 // r2: receiver
1115 // r3: argc
1116 // [sp+0]: argv
1117
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001118 Label invoke, handler_entry, exit;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001119
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001120 ProfileEntryHookStub::MaybeCallEntryHook(masm);
1121
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001122 // Called from C, so do not pop argc and args on exit (preserve sp)
1123 // No need to save register-passed args
1124 // Save callee-saved registers (incl. cp and fp), sp, and lr
1125 __ stm(db_w, sp, kCalleeSaved | lr.bit());
1126
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001127 // Save callee-saved vfp registers.
1128 __ vstm(db_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);
1129 // Set up the reserved register for 0.0.
1130 __ vmov(kDoubleRegZero, 0.0);
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01001131
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001132 // Get address of argv, see stm above.
1133 // r0: code entry
1134 // r1: function
1135 // r2: receiver
1136 // r3: argc
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01001137
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001138 // Set up argv in r4.
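  // argv was at [sp + 0] on entry (see the register comments above). The stm
  // above pushed the core callee-saved registers plus lr, and the vstm pushed
  // the callee-saved VFP registers, so argv now sits exactly that far above sp;
  // that is the offset computed here.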
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01001139 int offset_to_argv = (kNumCalleeSaved + 1) * kPointerSize;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001140 offset_to_argv += kNumDoubleCalleeSaved * kDoubleSize;
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01001141 __ ldr(r4, MemOperand(sp, offset_to_argv));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001142
1143 // Push a frame with special values setup to mark it as an entry frame.
1144 // r0: code entry
1145 // r1: function
1146 // r2: receiver
1147 // r3: argc
1148 // r4: argv
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001149 int marker = type();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001150 if (FLAG_enable_embedded_constant_pool) {
1151 __ mov(r8, Operand::Zero());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001152 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001153 __ mov(r7, Operand(Smi::FromInt(marker)));
1154 __ mov(r6, Operand(Smi::FromInt(marker)));
Steve Block44f0eee2011-05-26 01:26:41 +01001155 __ mov(r5,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001156 Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001157 __ ldr(r5, MemOperand(r5));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001158 __ mov(ip, Operand(-1)); // Push a bad frame pointer to fail if it is used.
1159 __ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() |
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001160 (FLAG_enable_embedded_constant_pool ? r8.bit() : 0) |
1161 ip.bit());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001162
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001163 // Set up frame pointer for the frame to be pushed.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001164 __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
1165
Ben Murdochb0fe1622011-05-05 13:52:32 +01001166 // If this is the outermost JS call, set js_entry_sp value.
Steve Block053d10c2011-06-13 19:13:29 +01001167 Label non_outermost_js;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001168 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001169 __ mov(r5, Operand(ExternalReference(js_entry_sp)));
1170 __ ldr(r6, MemOperand(r5));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001171 __ cmp(r6, Operand::Zero());
Steve Block053d10c2011-06-13 19:13:29 +01001172 __ b(ne, &non_outermost_js);
1173 __ str(fp, MemOperand(r5));
1174 __ mov(ip, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
1175 Label cont;
1176 __ b(&cont);
1177 __ bind(&non_outermost_js);
1178 __ mov(ip, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
1179 __ bind(&cont);
1180 __ push(ip);
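  // The marker pushed here is popped again on the way out (see
  // non_outermost_js_2 below); js_entry_sp is only cleared when the marker is
  // OUTERMOST_JSENTRY_FRAME, so nested JS entries keep the outermost entry's
  // stack pointer recorded.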
Ben Murdochb0fe1622011-05-05 13:52:32 +01001181
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001182 // Jump to a faked try block that does the invoke, with a faked catch
1183 // block that sets the pending exception.
1184 __ jmp(&invoke);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001185
1186 // Block literal pool emission whilst taking the position of the handler
1187 // entry. This avoids making the assumption that literal pools are always
1188 // emitted after an instruction is emitted, rather than before.
1189 {
1190 Assembler::BlockConstPoolScope block_const_pool(masm);
1191 __ bind(&handler_entry);
1192 handler_offset_ = handler_entry.pos();
1193 // Caught exception: Store result (exception) in the pending exception
1194 // field in the JSEnv and return a failure sentinel. Coming in here the
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001195 // fp will be invalid because the PushStackHandler below sets it to 0 to
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001196 // signal the existence of the JSEntry frame.
1197 __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1198 isolate())));
1199 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001200 __ str(r0, MemOperand(ip));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001201 __ LoadRoot(r0, Heap::kExceptionRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001202 __ b(&exit);
1203
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001204 // Invoke: Link this frame into the handler chain.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001205 __ bind(&invoke);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001206 // Must preserve r0-r4, r5-r6 are available.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001207 __ PushStackHandler();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001208 // If an exception not caught by another handler occurs, this handler
1209 // returns control to the code after the bl(&invoke) above, which
1210 // restores all kCalleeSaved registers (including cp and fp) to their
1211 // saved values before returning a failure to C.
1212
1213 // Clear any pending exceptions.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001214 __ mov(r5, Operand(isolate()->factory()->the_hole_value()));
Ben Murdoch589d6972011-11-30 16:04:58 +00001215 __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001216 isolate())));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001217 __ str(r5, MemOperand(ip));
1218
1219 // Invoke the function by calling through JS entry trampoline builtin.
1220 // Notice that we cannot store a reference to the trampoline code directly in
1221 // this stub, because runtime stubs are not traversed when doing GC.
1222
1223 // Expected registers by Builtins::JSEntryTrampoline
1224 // r0: code entry
1225 // r1: function
1226 // r2: receiver
1227 // r3: argc
1228 // r4: argv
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001229 if (type() == StackFrame::ENTRY_CONSTRUCT) {
Steve Block44f0eee2011-05-26 01:26:41 +01001230 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001231 isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001232 __ mov(ip, Operand(construct_entry));
1233 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001234 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001235 __ mov(ip, Operand(entry));
1236 }
1237 __ ldr(ip, MemOperand(ip)); // deref address
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001238 __ add(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001239
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001240 // Branch and link to JSEntryTrampoline.
1241 __ Call(ip);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001242
Steve Block053d10c2011-06-13 19:13:29 +01001243 // Unlink this frame from the handler chain.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001244 __ PopStackHandler();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001245
1246 __ bind(&exit); // r0 holds result
Steve Block053d10c2011-06-13 19:13:29 +01001247 // Check if the current stack frame is marked as the outermost JS frame.
1248 Label non_outermost_js_2;
1249 __ pop(r5);
1250 __ cmp(r5, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
1251 __ b(ne, &non_outermost_js_2);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001252 __ mov(r6, Operand::Zero());
Steve Block053d10c2011-06-13 19:13:29 +01001253 __ mov(r5, Operand(ExternalReference(js_entry_sp)));
1254 __ str(r6, MemOperand(r5));
1255 __ bind(&non_outermost_js_2);
Steve Block053d10c2011-06-13 19:13:29 +01001256
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001257 // Restore the top frame descriptors from the stack.
1258 __ pop(r3);
Steve Block44f0eee2011-05-26 01:26:41 +01001259 __ mov(ip,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001260 Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001261 __ str(r3, MemOperand(ip));
1262
1263 // Reset the stack to the callee saved registers.
1264 __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
1265
1266 // Restore callee-saved registers and return.
1267#ifdef DEBUG
1268 if (FLAG_debug_code) {
1269 __ mov(lr, Operand(pc));
1270 }
1271#endif
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01001272
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001273 // Restore callee-saved vfp registers.
1274 __ vldm(ia_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01001275
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001276 __ ldm(ia_w, sp, kCalleeSaved | pc.bit());
1277}
1278
1279
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001280void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
1281 Label miss;
1282 Register receiver = LoadDescriptor::ReceiverRegister();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001283 // Ensure that the vector and slot registers won't be clobbered before
1284 // calling the miss handler.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001285 DCHECK(!AreAliased(r4, r5, LoadWithVectorDescriptor::VectorRegister(),
1286 LoadWithVectorDescriptor::SlotRegister()));
Steve Block1e0659c2011-05-24 12:43:12 +01001287
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001288 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r4,
1289 r5, &miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001290 __ bind(&miss);
1291 PropertyAccessCompiler::TailCallBuiltin(
1292 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
1293}
Steve Block1e0659c2011-05-24 12:43:12 +01001294
1295
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001296void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
1297 // Return address is in lr.
1298 Label miss;
1299
1300 Register receiver = LoadDescriptor::ReceiverRegister();
1301 Register index = LoadDescriptor::NameRegister();
1302 Register scratch = r5;
1303 Register result = r0;
1304 DCHECK(!scratch.is(receiver) && !scratch.is(index));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001305 DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
1306 result.is(LoadWithVectorDescriptor::SlotRegister()));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001307
1308 // StringCharAtGenerator doesn't use the result register until it's passed
1309 // the different miss possibilities. If it did, we would have a conflict
1310 // when FLAG_vector_ics is true.
1311 StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
1312 &miss, // When not a string.
1313 &miss, // When not a number.
1314 &miss, // When index out of range.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001315 RECEIVER_IS_STRING);
1316 char_at_generator.GenerateFast(masm);
1317 __ Ret();
1318
1319 StubRuntimeCallHelper call_helper;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001320 char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001321
1322 __ bind(&miss);
1323 PropertyAccessCompiler::TailCallBuiltin(
1324 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
1325}
1326
1327
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001328void RegExpExecStub::Generate(MacroAssembler* masm) {
1329 // Just jump directly to runtime if native RegExp is not selected at compile
1330 // time, or if the regexp entry in generated code is turned off by a runtime
1331 // switch or at compilation.
1332#ifdef V8_INTERPRETED_REGEXP
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001333 __ TailCallRuntime(Runtime::kRegExpExec);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001334#else // V8_INTERPRETED_REGEXP
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001335
1336 // Stack frame on entry.
1337 // sp[0]: last_match_info (expected JSArray)
1338 // sp[4]: previous index
1339 // sp[8]: subject string
1340 // sp[12]: JSRegExp object
1341
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001342 const int kLastMatchInfoOffset = 0 * kPointerSize;
1343 const int kPreviousIndexOffset = 1 * kPointerSize;
1344 const int kSubjectOffset = 2 * kPointerSize;
1345 const int kJSRegExpOffset = 3 * kPointerSize;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001346
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001347 Label runtime;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001348 // Allocation of registers for this function. These are in callee save
1349 // registers and will be preserved by the call to the native RegExp code, as
1350 // this code is called using the normal C calling convention. When calling
1351 // directly from generated code the native RegExp code will not do a GC and
1352 // therefore the content of these registers are safe to use after the call.
1353 Register subject = r4;
1354 Register regexp_data = r5;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001355 Register last_match_info_elements = no_reg; // will be r6;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001356
1357 // Ensure that a RegExp stack is allocated.
1358 ExternalReference address_of_regexp_stack_memory_address =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001359 ExternalReference::address_of_regexp_stack_memory_address(isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001360 ExternalReference address_of_regexp_stack_memory_size =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001361 ExternalReference::address_of_regexp_stack_memory_size(isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001362 __ mov(r0, Operand(address_of_regexp_stack_memory_size));
1363 __ ldr(r0, MemOperand(r0, 0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001364 __ cmp(r0, Operand::Zero());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001365 __ b(eq, &runtime);
1366
1367 // Check that the first argument is a JSRegExp object.
1368 __ ldr(r0, MemOperand(sp, kJSRegExpOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001369 __ JumpIfSmi(r0, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001370 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
1371 __ b(ne, &runtime);
1372
1373 // Check that the RegExp has been compiled (data contains a fixed array).
1374 __ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset));
1375 if (FLAG_debug_code) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001376 __ SmiTst(regexp_data);
1377 __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001378 __ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001379 __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001380 }
1381
1382 // regexp_data: RegExp data (FixedArray)
1383 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
1384 __ ldr(r0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
1385 __ cmp(r0, Operand(Smi::FromInt(JSRegExp::IRREGEXP)));
1386 __ b(ne, &runtime);
1387
1388 // regexp_data: RegExp data (FixedArray)
1389 // Check that the number of captures fit in the static offsets vector buffer.
1390 __ ldr(r2,
1391 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001392 // Check (number_of_captures + 1) * 2 <= offsets vector size
1393 // Or number_of_captures * 2 <= offsets vector size - 2
1394 // Multiplying by 2 comes for free since r2 is smi-tagged.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001395 STATIC_ASSERT(kSmiTag == 0);
1396 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001397 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
1398 __ cmp(r2, Operand(Isolate::kJSRegexpStaticOffsetsVectorSize - 2));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001399 __ b(hi, &runtime);
1400
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001401 // Reset offset for possibly sliced string.
1402 __ mov(r9, Operand::Zero());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001403 __ ldr(subject, MemOperand(sp, kSubjectOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001404 __ JumpIfSmi(subject, &runtime);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001405 __ mov(r3, subject); // Make a copy of the original subject string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001406 // subject: subject string
1407 // r3: subject string
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001408 // regexp_data: RegExp data (FixedArray)
1409 // Handle subject string according to its encoding and representation:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001410 // (1) Sequential string? If yes, go to (4).
1411 // (2) Sequential or cons? If not, go to (5).
1412 // (3) Cons string. If the string is flat, replace subject with first string
1413 // and go to (1). Otherwise bail out to runtime.
1414 // (4) Sequential string. Load regexp code according to encoding.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001415 // (E) Carry on.
1416 /// [...]
1417
1418 // Deferred code at the end of the stub:
Ben Murdoch097c5b22016-05-18 11:27:45 +01001419 // (5) Long external string? If not, go to (7).
1420 // (6) External string. Make it, offset-wise, look like a sequential string.
1421 // Go to (4).
1422 // (7) Short external string or not a string? If yes, bail out to runtime.
1423 // (8) Sliced string. Replace subject with parent. Go to (1).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001424
Ben Murdoch097c5b22016-05-18 11:27:45 +01001425 Label seq_string /* 4 */, external_string /* 6 */, check_underlying /* 1 */,
1426 not_seq_nor_cons /* 5 */, not_long_external /* 7 */;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001427
Ben Murdoch097c5b22016-05-18 11:27:45 +01001428 __ bind(&check_underlying);
1429 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
1430 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
1431
1432 // (1) Sequential string? If yes, go to (4).
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001433 __ and_(r1,
1434 r0,
1435 Operand(kIsNotStringMask |
1436 kStringRepresentationMask |
1437 kShortExternalStringMask),
1438 SetCC);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001439 STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001440 __ b(eq, &seq_string); // Go to (4).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001441
Ben Murdoch097c5b22016-05-18 11:27:45 +01001442 // (2) Sequential or cons? If not, go to (5).
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001443 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
1444 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001445 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
1446 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001447 __ cmp(r1, Operand(kExternalStringTag));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001448 __ b(ge, &not_seq_nor_cons); // Go to (5).
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001449
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001450 // (3) Cons string. Check that it's flat.
1451 // Replace subject with first string and reload instance type.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001452 __ ldr(r0, FieldMemOperand(subject, ConsString::kSecondOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001453 __ CompareRoot(r0, Heap::kempty_stringRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001454 __ b(ne, &runtime);
1455 __ ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001456 __ jmp(&check_underlying);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001457
Ben Murdoch097c5b22016-05-18 11:27:45 +01001458 // (4) Sequential string. Load regexp code according to encoding.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001459 __ bind(&seq_string);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001460 // subject: sequential subject string (or look-alike, external string)
1461 // r3: original subject string
1462 // Load previous index and check range before r3 is overwritten. We have to
1463 // use r3 instead of subject here because subject might have been only made
1464 // to look like a sequential string when it actually is an external string.
1465 __ ldr(r1, MemOperand(sp, kPreviousIndexOffset));
1466 __ JumpIfNotSmi(r1, &runtime);
1467 __ ldr(r3, FieldMemOperand(r3, String::kLengthOffset));
1468 __ cmp(r3, Operand(r1));
1469 __ b(ls, &runtime);
1470 __ SmiUntag(r1);
1471
1472 STATIC_ASSERT(4 == kOneByteStringTag);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001473 STATIC_ASSERT(kTwoByteStringTag == 0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001474 __ and_(r0, r0, Operand(kStringEncodingMask));
1475 __ mov(r3, Operand(r0, ASR, 2), SetCC);
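  // Per the STATIC_ASSERTs above, the masked value in r0 is 4 for one-byte
  // strings and 0 for two-byte strings. Shifting it right by 2 leaves 1 or 0 in
  // r3 and sets the condition flags, so the two conditional loads below pick
  // the code object matching the encoding.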
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001476 __ ldr(r6, FieldMemOperand(regexp_data, JSRegExp::kDataOneByteCodeOffset),
1477 ne);
1478 __ ldr(r6, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset), eq);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001479
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001480 // (E) Carry on. String handling is done.
1481 // r6: irregexp code
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001482 // Check that the irregexp code has been generated for the actual string
1483 // encoding. If it has, the field contains a code object; otherwise it contains
Ben Murdoch257744e2011-11-30 15:57:28 +00001484 // a smi (code flushing support).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001485 __ JumpIfSmi(r6, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001486
1487 // r1: previous index
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001488 // r3: encoding of subject string (1 if one_byte, 0 if two_byte);
1489 // r6: code
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001490 // subject: Subject string
1491 // regexp_data: RegExp data (FixedArray)
1492 // All checks done. Now push arguments for native regexp code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001493 __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1, r0, r2);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001494
Steve Block44f0eee2011-05-26 01:26:41 +01001495 // Isolates: note we add an additional parameter here (isolate pointer).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001496 const int kRegExpExecuteArguments = 9;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001497 const int kParameterRegisters = 4;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001498 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001499
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001500 // Stack pointer now points to cell where return address is to be written.
1501 // Arguments are before that on the stack or in registers.
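  // With four register parameters, C stack argument N lands at
  // sp + (N - 4) * kPointerSize (sp + 0 is reserved for the return address), so
  // argument 9 is stored at sp[20] and argument 5 at sp[4] below.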
1502
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001503 // Argument 9 (sp[20]): Pass current isolate address.
1504 __ mov(r0, Operand(ExternalReference::isolate_address(isolate())));
1505 __ str(r0, MemOperand(sp, 5 * kPointerSize));
1506
1507 // Argument 8 (sp[16]): Indicate that this is a direct call from JavaScript.
1508 __ mov(r0, Operand(1));
Steve Block44f0eee2011-05-26 01:26:41 +01001509 __ str(r0, MemOperand(sp, 4 * kPointerSize));
1510
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001511 // Argument 7 (sp[12]): Start (high end) of backtracking stack memory area.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001512 __ mov(r0, Operand(address_of_regexp_stack_memory_address));
1513 __ ldr(r0, MemOperand(r0, 0));
1514 __ mov(r2, Operand(address_of_regexp_stack_memory_size));
1515 __ ldr(r2, MemOperand(r2, 0));
1516 __ add(r0, r0, Operand(r2));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001517 __ str(r0, MemOperand(sp, 3 * kPointerSize));
1518
1519 // Argument 6: Set the number of capture registers to zero to force global
1520 // regexps to behave as non-global. This does not affect non-global regexps.
1521 __ mov(r0, Operand::Zero());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001522 __ str(r0, MemOperand(sp, 2 * kPointerSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001523
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001524 // Argument 5 (sp[4]): static offsets vector buffer.
Steve Block44f0eee2011-05-26 01:26:41 +01001525 __ mov(r0,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001526 Operand(ExternalReference::address_of_static_offsets_vector(
1527 isolate())));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001528 __ str(r0, MemOperand(sp, 1 * kPointerSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001529
1530 // For arguments 4 and 3, get the string length, calculate the start of string
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001531 // data, and calculate the shift of the index (0 for one-byte and 1 for two-byte).
1532 __ add(r7, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001533 __ eor(r3, r3, Operand(1));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001534 // Load the length from the original subject string from the previous stack
1535 // frame. Therefore we have to use fp, which points exactly to two pointer
1536 // sizes below the previous sp. (Because creating a new stack frame pushes
1537 // the previous fp onto the stack and moves up sp by 2 * kPointerSize.)
Ben Murdoch589d6972011-11-30 16:04:58 +00001538 __ ldr(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001539 // If slice offset is not 0, load the length from the original sliced string.
1540 // Argument 4, r3: End of string data
1541 // Argument 3, r2: Start of string data
1542 // Prepare start and end index of the input.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001543 __ add(r9, r7, Operand(r9, LSL, r3));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001544 __ add(r2, r9, Operand(r1, LSL, r3));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001545
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001546 __ ldr(r7, FieldMemOperand(subject, String::kLengthOffset));
1547 __ SmiUntag(r7);
1548 __ add(r3, r9, Operand(r7, LSL, r3));
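  // At this point r9 points at the first character to consider (string data
  // start plus any slice offset), r2 (argument 3) points at the character at
  // the previous index, and r3 (argument 4) points just past the last character
  // of the subject, with indices scaled by the character size (shift 0 for
  // one-byte, 1 for two-byte).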
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001549
1550 // Argument 2 (r1): Previous index.
1551 // Already there
1552
1553 // Argument 1 (r0): Subject string.
Ben Murdoch589d6972011-11-30 16:04:58 +00001554 __ mov(r0, subject);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001555
1556 // Locate the code entry and call it.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001557 __ add(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
1558 DirectCEntryStub stub(isolate());
1559 stub.GenerateCall(masm, r6);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001560
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001561 __ LeaveExitFrame(false, no_reg, true);
1562
1563 last_match_info_elements = r6;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001564
1565 // r0: result
1566 // subject: subject string (callee saved)
1567 // regexp_data: RegExp data (callee saved)
1568 // last_match_info_elements: Last match info elements (callee saved)
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001569 // Check the result.
1570 Label success;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001571 __ cmp(r0, Operand(1));
1572 // We expect exactly one result since we force the called regexp to behave
1573 // as non-global.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001574 __ b(eq, &success);
1575 Label failure;
Ben Murdoch589d6972011-11-30 16:04:58 +00001576 __ cmp(r0, Operand(NativeRegExpMacroAssembler::FAILURE));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001577 __ b(eq, &failure);
Ben Murdoch589d6972011-11-30 16:04:58 +00001578 __ cmp(r0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001579 // If not exception it can only be retry. Handle that in the runtime system.
1580 __ b(ne, &runtime);
1581 // Result must now be exception. If there is no pending exception already, a
1582 // stack overflow (on the backtrack stack) was detected in RegExp code but the
1583 // exception has not been created yet. Handle that in the runtime system.
1584 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001585 __ mov(r1, Operand(isolate()->factory()->the_hole_value()));
Ben Murdoch589d6972011-11-30 16:04:58 +00001586 __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001587 isolate())));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001588 __ ldr(r0, MemOperand(r2, 0));
Ben Murdoch589d6972011-11-30 16:04:58 +00001589 __ cmp(r0, r1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001590 __ b(eq, &runtime);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001591
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001592 // For exception, throw the exception again.
1593 __ TailCallRuntime(Runtime::kRegExpExecReThrow);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001594
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001595 __ bind(&failure);
1596 // For failure return null (the exception case was rethrown above).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001597 __ mov(r0, Operand(isolate()->factory()->null_value()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001598 __ add(sp, sp, Operand(4 * kPointerSize));
1599 __ Ret();
1600
1601 // Process the result from the native regexp code.
1602 __ bind(&success);
1603 __ ldr(r1,
1604 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
1605 // Calculate number of capture registers (number_of_captures + 1) * 2.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001606 // Multiplying by 2 comes for free since r1 is smi-tagged.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001607 STATIC_ASSERT(kSmiTag == 0);
1608 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
1609 __ add(r1, r1, Operand(2)); // r1 was a smi.
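  // r1 held number_of_captures as a smi, i.e. number_of_captures * 2 once the
  // tag shift is taken into account, so adding 2 yields the untagged value
  // (number_of_captures + 1) * 2 directly.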
1610
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001611 __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset));
1612 __ JumpIfSmi(r0, &runtime);
1613 __ CompareObjectType(r0, r2, r2, JS_ARRAY_TYPE);
1614 __ b(ne, &runtime);
1615 // Check that the JSArray is in fast case.
1616 __ ldr(last_match_info_elements,
1617 FieldMemOperand(r0, JSArray::kElementsOffset));
1618 __ ldr(r0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset));
1619 __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex);
1620 __ b(ne, &runtime);
1621 // Check that the last match info has space for the capture registers and the
1622 // additional information.
1623 __ ldr(r0,
1624 FieldMemOperand(last_match_info_elements, FixedArray::kLengthOffset));
1625 __ add(r2, r1, Operand(RegExpImpl::kLastMatchOverhead));
1626 __ cmp(r2, Operand::SmiUntag(r0));
1627 __ b(gt, &runtime);
1628
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001629 // r1: number of capture registers
1630 // r4: subject string
1631 // Store the capture count.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001632 __ SmiTag(r2, r1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001633 __ str(r2, FieldMemOperand(last_match_info_elements,
1634 RegExpImpl::kLastCaptureCountOffset));
1635 // Store last subject and last input.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001636 __ str(subject,
1637 FieldMemOperand(last_match_info_elements,
1638 RegExpImpl::kLastSubjectOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001639 __ mov(r2, subject);
1640 __ RecordWriteField(last_match_info_elements,
1641 RegExpImpl::kLastSubjectOffset,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001642 subject,
1643 r3,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001644 kLRHasNotBeenSaved,
1645 kDontSaveFPRegs);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001646 __ mov(subject, r2);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001647 __ str(subject,
1648 FieldMemOperand(last_match_info_elements,
1649 RegExpImpl::kLastInputOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001650 __ RecordWriteField(last_match_info_elements,
1651 RegExpImpl::kLastInputOffset,
1652 subject,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001653 r3,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001654 kLRHasNotBeenSaved,
1655 kDontSaveFPRegs);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001656
1657 // Get the static offsets vector filled by the native regexp code.
1658 ExternalReference address_of_static_offsets_vector =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001659 ExternalReference::address_of_static_offsets_vector(isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001660 __ mov(r2, Operand(address_of_static_offsets_vector));
1661
1662 // r1: number of capture registers
1663 // r2: offsets vector
1664 Label next_capture, done;
1665 // Capture register counter starts from number of capture registers and
1666 // counts down until wrapping after zero.
1667 __ add(r0,
1668 last_match_info_elements,
1669 Operand(RegExpImpl::kFirstCaptureOffset - kHeapObjectTag));
1670 __ bind(&next_capture);
1671 __ sub(r1, r1, Operand(1), SetCC);
1672 __ b(mi, &done);
1673 // Read the value from the static offsets vector buffer.
1674 __ ldr(r3, MemOperand(r2, kPointerSize, PostIndex));
1675 // Store the smi value in the last match info.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001676 __ SmiTag(r3);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001677 __ str(r3, MemOperand(r0, kPointerSize, PostIndex));
1678 __ jmp(&next_capture);
1679 __ bind(&done);
1680
1681 // Return last match info.
1682 __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset));
1683 __ add(sp, sp, Operand(4 * kPointerSize));
1684 __ Ret();
1685
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001686 // Do the runtime call to execute the regexp.
1687 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001688 __ TailCallRuntime(Runtime::kRegExpExec);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001689
1690 // Deferred code for string handling.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001691 // (5) Long external string? If not, go to (7).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001692 __ bind(&not_seq_nor_cons);
1693 // Compare flags are still set.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001694 __ b(gt, &not_long_external); // Go to (7).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001695
Ben Murdoch097c5b22016-05-18 11:27:45 +01001696 // (6) External string. Make it, offset-wise, look like a sequential string.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001697 __ bind(&external_string);
1698 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
1699 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
1700 if (FLAG_debug_code) {
1701 // Assert that we do not have a cons or slice (indirect strings) here.
1702 // Sequential strings have already been ruled out.
1703 __ tst(r0, Operand(kIsIndirectStringMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001704 __ Assert(eq, kExternalStringExpectedButNotFound);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001705 }
1706 __ ldr(subject,
1707 FieldMemOperand(subject, ExternalString::kResourceDataOffset));
1708 // Move the pointer so that offset-wise, it looks like a sequential string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001709 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001710 __ sub(subject,
1711 subject,
1712 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001713 __ jmp(&seq_string); // Go to (4).
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001714
Ben Murdoch097c5b22016-05-18 11:27:45 +01001715 // (7) Short external string or not a string? If yes, bail out to runtime.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001716 __ bind(&not_long_external);
1717 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
1718 __ tst(r1, Operand(kIsNotStringMask | kShortExternalStringMask));
1719 __ b(ne, &runtime);
1720
Ben Murdoch097c5b22016-05-18 11:27:45 +01001721 // (8) Sliced string. Replace subject with parent. Go to (1).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001722 // Load offset into r9 and replace subject string with parent.
1723 __ ldr(r9, FieldMemOperand(subject, SlicedString::kOffsetOffset));
1724 __ SmiUntag(r9);
1725 __ ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset));
1726 __ jmp(&check_underlying); // Go to (1).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001727#endif // V8_INTERPRETED_REGEXP
1728}
1729
1730
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001731static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
1732 // r0 : number of arguments to the construct function
1733 // r1 : the function to call
1734 // r2 : feedback vector
1735 // r3 : slot in feedback vector (Smi)
1736 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1737
1738 // Number-of-arguments register must be smi-tagged to call out.
1739 __ SmiTag(r0);
1740 __ Push(r3, r2, r1, r0);
1741
1742 __ CallStub(stub);
1743
1744 __ Pop(r3, r2, r1, r0);
1745 __ SmiUntag(r0);
1746}
1747
1748
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001749static void GenerateRecordCallTarget(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001750 // Cache the called function in a feedback vector slot. Cache states
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001751 // are uninitialized, monomorphic (indicated by a JSFunction), and
1752 // megamorphic.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001753 // r0 : number of arguments to the construct function
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001754 // r1 : the function to call
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001755 // r2 : feedback vector
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001756 // r3 : slot in feedback vector (Smi)
1757 Label initialize, done, miss, megamorphic, not_array_function;
Ben Murdoch61f157c2016-09-16 13:49:30 +01001758 Label done_initialize_count, done_increment_count;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001759
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001760 DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
1761 masm->isolate()->heap()->megamorphic_symbol());
1762 DCHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(masm->isolate()),
1763 masm->isolate()->heap()->uninitialized_symbol());
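  // The cache slot can hold the uninitialized sentinel, the megamorphic
  // sentinel, a WeakCell wrapping the cached JSFunction, or an AllocationSite
  // (used when the cached target is the Array function). The next slot in the
  // vector holds the call count as a Smi.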
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001764
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001765 // Load the cache state into r5.
1766 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
1767 __ ldr(r5, FieldMemOperand(r5, FixedArray::kHeaderSize));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001768
1769 // A monomorphic cache hit or an already megamorphic state: invoke the
1770 // function without changing the state.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001771 // We don't know if r5 is a WeakCell or a Symbol, but it's harmless to read at
1772 // this position in a symbol (see static asserts in type-feedback-vector.h).
1773 Label check_allocation_site;
1774 Register feedback_map = r6;
1775 Register weak_value = r9;
1776 __ ldr(weak_value, FieldMemOperand(r5, WeakCell::kValueOffset));
1777 __ cmp(r1, weak_value);
Ben Murdoch61f157c2016-09-16 13:49:30 +01001778 __ b(eq, &done_increment_count);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001779 __ CompareRoot(r5, Heap::kmegamorphic_symbolRootIndex);
1780 __ b(eq, &done);
1781 __ ldr(feedback_map, FieldMemOperand(r5, HeapObject::kMapOffset));
1782 __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex);
1783 __ b(ne, &check_allocation_site);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001784
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001785 // If the weak cell is cleared, we have a new chance to become monomorphic.
1786 __ JumpIfSmi(weak_value, &initialize);
1787 __ jmp(&megamorphic);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001788
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001789 __ bind(&check_allocation_site);
1790 // If we came here, we need to see if we are the array function.
1791 // If we didn't have a matching function, and we didn't find the megamorph
1792 // sentinel, then we have in the slot either some other function or an
1793 // AllocationSite.
1794 __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex);
1795 __ b(ne, &miss);
1796
1797 // Make sure the function is the Array() function
1798 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
1799 __ cmp(r1, r5);
1800 __ b(ne, &megamorphic);
Ben Murdoch61f157c2016-09-16 13:49:30 +01001801 __ jmp(&done_increment_count);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001802
1803 __ bind(&miss);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001804
1805 // A monomorphic miss (i.e., here the cache is not uninitialized) goes
1806 // megamorphic.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001807 __ CompareRoot(r5, Heap::kuninitialized_symbolRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001808 __ b(eq, &initialize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001809 // MegamorphicSentinel is an immortal immovable object (undefined) so no
1810 // write-barrier is needed.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001811 __ bind(&megamorphic);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001812 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001813 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001814 __ str(ip, FieldMemOperand(r5, FixedArray::kHeaderSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001815 __ jmp(&done);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001816
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001817 // An uninitialized cache is patched with the function
1818 __ bind(&initialize);
1819
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001820 // Make sure the function is the Array() function
1821 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
1822 __ cmp(r1, r5);
1823 __ b(ne, &not_array_function);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001824
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001825 // The target function is the Array constructor.
1826 // Create an AllocationSite if we don't already have it and store it in the
1827 // slot.
1828 CreateAllocationSiteStub create_stub(masm->isolate());
1829 CallStubInRecordCallTarget(masm, &create_stub);
Ben Murdoch61f157c2016-09-16 13:49:30 +01001830 __ b(&done_initialize_count);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001831
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001832 __ bind(&not_array_function);
1833 CreateWeakCellStub weak_cell_stub(masm->isolate());
1834 CallStubInRecordCallTarget(masm, &weak_cell_stub);
Ben Murdoch61f157c2016-09-16 13:49:30 +01001835
1836 __ bind(&done_initialize_count);
1837 // Initialize the call counter.
1838 __ Move(r5, Operand(Smi::FromInt(1)));
1839 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
1840 __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + kPointerSize));
1841 __ b(&done);
1842
1843 __ bind(&done_increment_count);
1844
1845 // Increment the call count for monomorphic function calls.
1846 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
1847 __ add(r5, r5, Operand(FixedArray::kHeaderSize + kPointerSize));
1848 __ ldr(r4, FieldMemOperand(r5, 0));
1849 __ add(r4, r4, Operand(Smi::FromInt(1)));
1850 __ str(r4, FieldMemOperand(r5, 0));
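  // The call count lives in the feedback vector entry directly after the cache
  // slot (hence the extra kPointerSize above) and is kept as a Smi, so adding
  // Smi::FromInt(1) bumps it by one call.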
1851
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001852 __ bind(&done);
1853}
1854
1855
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001856void CallConstructStub::Generate(MacroAssembler* masm) {
1857 // r0 : number of arguments
1858 // r1 : the function to call
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001859 // r2 : feedback vector
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001860 // r3 : slot in feedback vector (Smi, for RecordCallTarget)
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001861
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001862 Label non_function;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001863 // Check that the function is not a smi.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001864 __ JumpIfSmi(r1, &non_function);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001865 // Check that the function is a JSFunction.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001866 __ CompareObjectType(r1, r5, r5, JS_FUNCTION_TYPE);
1867 __ b(ne, &non_function);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001868
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001869 GenerateRecordCallTarget(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001870
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001871 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
1872 Label feedback_register_initialized;
1873 // Put the AllocationSite from the feedback vector into r2, or undefined.
1874 __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize));
1875 __ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset));
1876 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
1877 __ b(eq, &feedback_register_initialized);
1878 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
1879 __ bind(&feedback_register_initialized);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001880
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001881 __ AssertUndefinedOrAllocationSite(r2, r5);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001882
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001883 // Pass function as new target.
1884 __ mov(r3, r1);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001885
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001886 // Tail call to the function-specific construct stub (still in the caller
1887 // context at this point).
1888 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1889 __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset));
1890 __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001891
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001892 __ bind(&non_function);
1893 __ mov(r3, r1);
1894 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001895}
1896
1897
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001898void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001899 // r1 - function
1900 // r3 - slot id
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001901 // r2 - vector
1902 // r4 - allocation site (loaded from vector[slot])
1903 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
1904 __ cmp(r1, r5);
1905 __ b(ne, miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001906
1907 __ mov(r0, Operand(arg_count()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001908
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001909 // Increment the call count for monomorphic function calls.
1910 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
1911 __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize));
1912 __ ldr(r3, FieldMemOperand(r2, 0));
Ben Murdoch61f157c2016-09-16 13:49:30 +01001913 __ add(r3, r3, Operand(Smi::FromInt(1)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001914 __ str(r3, FieldMemOperand(r2, 0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001915
1916 __ mov(r2, r4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001917 __ mov(r3, r1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001918 ArrayConstructorStub stub(masm->isolate(), arg_count());
1919 __ TailCallStub(&stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001920}
1921
1922
1923void CallICStub::Generate(MacroAssembler* masm) {
1924 // r1 - function
1925 // r3 - slot id (Smi)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001926 // r2 - vector
1927 Label extra_checks_or_miss, call, call_function;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001928 int argc = arg_count();
1929 ParameterCount actual(argc);
1930
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001931 // The checks. First, does r1 match the recorded monomorphic target?
1932 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
1933 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001934
1935 // We don't know that we have a weak cell. We might have a private symbol
1936 // or an AllocationSite, but the memory is safe to examine.
1937 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
1938 // FixedArray.
1939 // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
1940 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
1941 // computed, meaning that it can't appear to be a pointer. If the low bit is
1942 // 0, then hash is computed, but the 0 bit prevents the field from appearing
1943 // to be a pointer.
1944 STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
1945 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
1946 WeakCell::kValueOffset &&
1947 WeakCell::kValueOffset == Symbol::kHashFieldSlot);
1948
1949 __ ldr(r5, FieldMemOperand(r4, WeakCell::kValueOffset));
1950 __ cmp(r1, r5);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001951 __ b(ne, &extra_checks_or_miss);
1952
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001953 // The compare above could have been a SMI/SMI comparison. Guard against this
1954 // convincing us that we have a monomorphic JSFunction.
1955 __ JumpIfSmi(r1, &extra_checks_or_miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001956
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001957 // Increment the call count for monomorphic function calls.
1958 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
1959 __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize));
1960 __ ldr(r3, FieldMemOperand(r2, 0));
Ben Murdoch61f157c2016-09-16 13:49:30 +01001961 __ add(r3, r3, Operand(Smi::FromInt(1)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001962 __ str(r3, FieldMemOperand(r2, 0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001963
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001964 __ bind(&call_function);
1965 __ mov(r0, Operand(argc));
Ben Murdoch097c5b22016-05-18 11:27:45 +01001966 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
1967 tail_call_mode()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001968 RelocInfo::CODE_TARGET);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001969
1970 __ bind(&extra_checks_or_miss);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001971 Label uninitialized, miss, not_allocation_site;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001972
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001973 __ CompareRoot(r4, Heap::kmegamorphic_symbolRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001974 __ b(eq, &call);
1975
1976 // Verify that r4 contains an AllocationSite
1977 __ ldr(r5, FieldMemOperand(r4, HeapObject::kMapOffset));
1978 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
1979 __ b(ne, &not_allocation_site);
1980
1981 // We have an allocation site.
1982 HandleArrayCase(masm, &miss);
1983
1984 __ bind(&not_allocation_site);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001985
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001986 // The following cases attempt to handle MISS cases without going to the
1987 // runtime.
1988 if (FLAG_trace_ic) {
1989 __ jmp(&miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001990 }
1991
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001992 __ CompareRoot(r4, Heap::kuninitialized_symbolRootIndex);
1993 __ b(eq, &uninitialized);
1994
1995 // We are going megamorphic. If the feedback is a JSFunction, it is fine
1996 // to handle it here. More complex cases are dealt with in the runtime.
1997 __ AssertNotSmi(r4);
1998 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
1999 __ b(ne, &miss);
2000 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
2001 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex);
2002 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002003
2004 __ bind(&call);
2005 __ mov(r0, Operand(argc));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002006 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002007 RelocInfo::CODE_TARGET);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002008
2009 __ bind(&uninitialized);
2010
2011 // We are going monomorphic, provided we actually have a JSFunction.
2012 __ JumpIfSmi(r1, &miss);
2013
2014 // Goto miss case if we do not have a function.
2015 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE);
2016 __ b(ne, &miss);
2017
2018 // Make sure the function is not the Array() function, which requires special
2019 // behavior on MISS.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002020 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r4);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002021 __ cmp(r1, r4);
2022 __ b(eq, &miss);
2023
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002024 // Make sure the function belongs to the same native context.
2025 __ ldr(r4, FieldMemOperand(r1, JSFunction::kContextOffset));
2026 __ ldr(r4, ContextMemOperand(r4, Context::NATIVE_CONTEXT_INDEX));
2027 __ ldr(ip, NativeContextMemOperand());
2028 __ cmp(r4, ip);
2029 __ b(ne, &miss);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002030
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002031 // Initialize the call counter.
Ben Murdoch61f157c2016-09-16 13:49:30 +01002032 __ Move(r5, Operand(Smi::FromInt(1)));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002033 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002034 __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + kPointerSize));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002035
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002036 // Store the function. Use a stub since we need a frame for allocation.
2037 // r2 - vector
2038 // r3 - slot
2039 // r1 - function
2040 {
2041 FrameScope scope(masm, StackFrame::INTERNAL);
2042 CreateWeakCellStub create_stub(masm->isolate());
2043 __ Push(r1);
2044 __ CallStub(&create_stub);
2045 __ Pop(r1);
2046 }
2047
2048 __ jmp(&call_function);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002049
2050 // We are here because tracing is on or we encountered a MISS case we can't
2051 // handle here.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002052 __ bind(&miss);
2053 GenerateMiss(masm);
2054
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002055 __ jmp(&call);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002056}
2057
2058
2059void CallICStub::GenerateMiss(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002060 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002061
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002062 // Push the receiver and the function and feedback info.
2063 __ Push(r1, r2, r3);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002064
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002065 // Call the entry.
2066 __ CallRuntime(Runtime::kCallIC_Miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002067
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002068 // Move result to r1 and exit the internal frame.
2069 __ mov(r1, r0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002070}
2071
2072
2073// StringCharCodeAtGenerator
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002074void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002075 // If the receiver is a smi trigger the non-string case.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002076 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
2077 __ JumpIfSmi(object_, receiver_not_string_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002078
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002079 // Fetch the instance type of the receiver into result register.
2080 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
2081 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
2082 // If the receiver is not a string trigger the non-string case.
2083 __ tst(result_, Operand(kIsNotStringMask));
2084 __ b(ne, receiver_not_string_);
2085 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002086
2087 // If the index is non-smi trigger the non-smi case.
Steve Block1e0659c2011-05-24 12:43:12 +01002088 __ JumpIfNotSmi(index_, &index_not_smi_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002089 __ bind(&got_smi_index_);
2090
2091 // Check for index out of range.
2092 __ ldr(ip, FieldMemOperand(object_, String::kLengthOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002093 __ cmp(ip, Operand(index_));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002094 __ b(ls, index_out_of_range_);
2095
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002096 __ SmiUntag(index_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002097
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002098 StringCharLoadGenerator::Generate(masm,
2099 object_,
2100 index_,
2101 result_,
2102 &call_runtime_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002103
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002104 __ SmiTag(result_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002105 __ bind(&exit_);
2106}
2107
2108
2109void StringCharCodeAtGenerator::GenerateSlow(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002110 MacroAssembler* masm, EmbedMode embed_mode,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002111 const RuntimeCallHelper& call_helper) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002112 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002113
2114 // Index is not a smi.
2115 __ bind(&index_not_smi_);
2116 // If index is a heap number, try converting it to an integer.
2117 __ CheckMap(index_,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002118 result_,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002119 Heap::kHeapNumberMapRootIndex,
2120 index_not_number_,
Ben Murdoch257744e2011-11-30 15:57:28 +00002121 DONT_DO_SMI_CHECK);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002122 call_helper.BeforeCall(masm);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002123 if (embed_mode == PART_OF_IC_HANDLER) {
2124 __ Push(LoadWithVectorDescriptor::VectorRegister(),
2125 LoadWithVectorDescriptor::SlotRegister(), object_, index_);
2126 } else {
2127 // index_ is consumed by runtime conversion function.
2128 __ Push(object_, index_);
2129 }
Ben Murdoch61f157c2016-09-16 13:49:30 +01002130 __ CallRuntime(Runtime::kNumberToSmi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002131 // Save the conversion result before the pop instructions below
2132 // have a chance to overwrite it.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002133 __ Move(index_, r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002134 if (embed_mode == PART_OF_IC_HANDLER) {
2135 __ Pop(LoadWithVectorDescriptor::VectorRegister(),
2136 LoadWithVectorDescriptor::SlotRegister(), object_);
2137 } else {
2138 __ pop(object_);
2139 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002140 // Reload the instance type.
2141 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
2142 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
2143 call_helper.AfterCall(masm);
2144 // If index is still not a smi, it must be out of range.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002145 __ JumpIfNotSmi(index_, index_out_of_range_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002146 // Otherwise, return to the fast path.
2147 __ jmp(&got_smi_index_);
2148
2149 // Call runtime. We get here when the receiver is a string and the
2150 // index is a number, but the code for getting the actual character
2151 // is too complex (e.g., when the string needs to be flattened).
2152 __ bind(&call_runtime_);
2153 call_helper.BeforeCall(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002154 __ SmiTag(index_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002155 __ Push(object_, index_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002156 __ CallRuntime(Runtime::kStringCharCodeAtRT);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002157 __ Move(result_, r0);
2158 call_helper.AfterCall(masm);
2159 __ jmp(&exit_);
2160
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002161 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002162}
2163
2164
2165// -------------------------------------------------------------------------
2166// StringCharFromCodeGenerator
2167
2168void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
2169 // Fast case of Heap::LookupSingleCharacterStringFromCode.
2170 STATIC_ASSERT(kSmiTag == 0);
2171 STATIC_ASSERT(kSmiShiftSize == 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002172 DCHECK(base::bits::IsPowerOfTwo32(String::kMaxOneByteCharCodeU + 1));
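  // Take the slow case unless code_ is a smi whose value fits in the one-byte
  // char code range: the test below fails if any tag bit or any bit above
  // String::kMaxOneByteCharCodeU is set.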
2173 __ tst(code_, Operand(kSmiTagMask |
2174 ((~String::kMaxOneByteCharCodeU) << kSmiTagSize)));
Steve Block1e0659c2011-05-24 12:43:12 +01002175 __ b(ne, &slow_case_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002176
2177 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002178 // At this point code register contains smi tagged one-byte char code.
2179 __ add(result_, result_, Operand::PointerOffsetFromSmiKey(code_));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002180 __ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002181 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002182 __ b(eq, &slow_case_);
2183 __ bind(&exit_);
2184}
2185
2186
2187void StringCharFromCodeGenerator::GenerateSlow(
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002188 MacroAssembler* masm,
2189 const RuntimeCallHelper& call_helper) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002190 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002191
2192 __ bind(&slow_case_);
2193 call_helper.BeforeCall(masm);
2194 __ push(code_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002195 __ CallRuntime(Runtime::kStringCharFromCode);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002196 __ Move(result_, r0);
2197 call_helper.AfterCall(masm);
2198 __ jmp(&exit_);
2199
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002200 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002201}
2202
2203
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002204enum CopyCharactersFlags { COPY_ONE_BYTE = 1, DEST_ALWAYS_ALIGNED = 2 };
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002205
2206
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002207void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
2208 Register dest,
2209 Register src,
2210 Register count,
2211 Register scratch,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002212 String::Encoding encoding) {
2213 if (FLAG_debug_code) {
2214 // Check that destination is word aligned.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002215 __ tst(dest, Operand(kPointerAlignmentMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002216 __ Check(eq, kDestinationOfCopyNotAligned);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002217 }
2218
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002219 // Assumes word reads and writes are little endian.
2220 // Nothing to do for zero characters.
2221 Label done;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002222 if (encoding == String::TWO_BYTE_ENCODING) {
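    // Two-byte characters occupy two bytes each, so double the character
    // count to get the number of bytes to copy.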
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002223 __ add(count, count, Operand(count), SetCC);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002224 }
2225
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002226 Register limit = count; // Read until dest equals this.
2227 __ add(limit, dest, Operand(count));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002228
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002229 Label loop_entry, loop;
2230 // Copy bytes from src to dest until dest hits limit.
2231 __ b(&loop_entry);
2232 __ bind(&loop);
2233 __ ldrb(scratch, MemOperand(src, 1, PostIndex), lt);
2234 __ strb(scratch, MemOperand(dest, 1, PostIndex));
2235 __ bind(&loop_entry);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002236 __ cmp(dest, Operand(limit));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002237 __ b(lt, &loop);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002238
2239 __ bind(&done);
2240}
2241
2242
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002243void SubStringStub::Generate(MacroAssembler* masm) {
2244 Label runtime;
2245
2246 // Stack frame on entry.
2247 // lr: return address
2248 // sp[0]: to
2249 // sp[4]: from
2250 // sp[8]: string
2251
2252 // This stub is called from the native-call %_SubString(...), so
2253 // nothing can be assumed about the arguments. It is tested that:
2254 // "string" is a sequential string,
2255 // both "from" and "to" are smis, and
2256 // 0 <= from <= to <= string.length.
2257 // If any of these assumptions fail, we call the runtime system.
2258
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002259 const int kToOffset = 0 * kPointerSize;
2260 const int kFromOffset = 1 * kPointerSize;
2261 const int kStringOffset = 2 * kPointerSize;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002262
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002263 __ Ldrd(r2, r3, MemOperand(sp, kToOffset));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002264 STATIC_ASSERT(kFromOffset == kToOffset + 4);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002265 STATIC_ASSERT(kSmiTag == 0);
2266 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002267
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002268 // Arithmetic shift right by one un-smi-tags. In this case we rotate right
2269 // instead because we bail out on non-smi values: ROR and ASR are equivalent
2270 // for smis but they set the flags in a way that's easier to optimize.
2271 __ mov(r2, Operand(r2, ROR, 1), SetCC);
2272 __ mov(r3, Operand(r3, ROR, 1), SetCC, cc);
2273 // If either to or from had the smi tag bit set, then C is set now, and N
2274 // has the same value: we rotated by 1, so the bottom bit is now the top bit.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002275 // We want to bail out to runtime here if "from" is negative. In that case, the
2276 // next instruction is not executed and we fall through to bailing out to
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002277 // runtime.
2278 // Executed if both r2 and r3 are untagged integers.
2279 __ sub(r2, r2, Operand(r3), SetCC, cc);
2280 // One of the above un-smis or the above SUB could have set N==1.
2281 __ b(mi, &runtime); // Either "from" or "to" is not an smi, or from > to.
Ben Murdoch85b71792012-04-11 18:30:58 +01002282
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002283 // Make sure first argument is a string.
Ben Murdoch589d6972011-11-30 16:04:58 +00002284 __ ldr(r0, MemOperand(sp, kStringOffset));
Ben Murdoch589d6972011-11-30 16:04:58 +00002285 __ JumpIfSmi(r0, &runtime);
2286 Condition is_string = masm->IsObjectStringType(r0, r1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002287 __ b(NegateCondition(is_string), &runtime);
2288
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002289 Label single_char;
2290 __ cmp(r2, Operand(1));
2291 __ b(eq, &single_char);
2292
Ben Murdoch589d6972011-11-30 16:04:58 +00002293 // Short-cut for the case of trivial substring.
2294 Label return_r0;
2295 // r0: original string
2296 // r2: result string length
2297 __ ldr(r4, FieldMemOperand(r0, String::kLengthOffset));
2298 __ cmp(r2, Operand(r4, ASR, 1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002299 // Return original string.
Ben Murdoch589d6972011-11-30 16:04:58 +00002300 __ b(eq, &return_r0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002301 // Longer than original string's length or negative: unsafe arguments.
2302 __ b(hi, &runtime);
2303 // Shorter than original string's length: an actual substring.
Ben Murdoch589d6972011-11-30 16:04:58 +00002304
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002305 // Deal with different string types: update the index if necessary
2306 // and put the underlying string into r5.
2307 // r0: original string
2308 // r1: instance type
2309 // r2: length
2310 // r3: from index (untagged)
2311 Label underlying_unpacked, sliced_string, seq_or_external_string;
2312 // If the string is not indirect, it can only be sequential or external.
2313 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
2314 STATIC_ASSERT(kIsIndirectStringMask != 0);
2315 __ tst(r1, Operand(kIsIndirectStringMask));
2316 __ b(eq, &seq_or_external_string);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002317
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002318 __ tst(r1, Operand(kSlicedNotConsMask));
2319 __ b(ne, &sliced_string);
2320 // Cons string. Check whether it is flat, then fetch first part.
2321 __ ldr(r5, FieldMemOperand(r0, ConsString::kSecondOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002322 __ CompareRoot(r5, Heap::kempty_stringRootIndex);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002323 __ b(ne, &runtime);
2324 __ ldr(r5, FieldMemOperand(r0, ConsString::kFirstOffset));
2325 // Update instance type.
2326 __ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset));
2327 __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
2328 __ jmp(&underlying_unpacked);
Ben Murdoch589d6972011-11-30 16:04:58 +00002329
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002330 __ bind(&sliced_string);
2331 // Sliced string. Fetch parent and correct start index by offset.
2332 __ ldr(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
2333 __ ldr(r4, FieldMemOperand(r0, SlicedString::kOffsetOffset));
2334 __ add(r3, r3, Operand(r4, ASR, 1)); // Add offset to index.
2335 // Update instance type.
2336 __ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset));
2337 __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
2338 __ jmp(&underlying_unpacked);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002339
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002340 __ bind(&seq_or_external_string);
2341 // Sequential or external string. Just move string to the expected register.
2342 __ mov(r5, r0);
2343
2344 __ bind(&underlying_unpacked);
2345
2346 if (FLAG_string_slices) {
2347 Label copy_routine;
2348 // r5: underlying subject string
2349 // r1: instance type of underlying subject string
2350 // r2: length
2351 // r3: adjusted start index (untagged)
2352 __ cmp(r2, Operand(SlicedString::kMinLength));
2353 // Short slice. Copy instead of slicing.
2354 __ b(lt, &copy_routine);
2355 // Allocate new sliced string. At this point we do not reload the instance
2356 // type including the string encoding because we simply rely on the info
2357 // provided by the original string. It does not matter if the original
2358 // string's encoding is wrong because we always have to recheck the encoding
2359 // of the newly created string's parent anyway due to externalized strings.
2360 Label two_byte_slice, set_slice_header;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002361 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002362 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
2363 __ tst(r1, Operand(kStringEncodingMask));
2364 __ b(eq, &two_byte_slice);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002365 __ AllocateOneByteSlicedString(r0, r2, r6, r4, &runtime);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002366 __ jmp(&set_slice_header);
2367 __ bind(&two_byte_slice);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002368 __ AllocateTwoByteSlicedString(r0, r2, r6, r4, &runtime);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002369 __ bind(&set_slice_header);
2370 __ mov(r3, Operand(r3, LSL, 1));
2371 __ str(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
2372 __ str(r3, FieldMemOperand(r0, SlicedString::kOffsetOffset));
2373 __ jmp(&return_r0);
2374
2375 __ bind(&copy_routine);
2376 }
2377
2378 // r5: underlying subject string
2379 // r1: instance type of underlying subject string
2380 // r2: length
2381 // r3: adjusted start index (untagged)
2382 Label two_byte_sequential, sequential_string, allocate_result;
2383 STATIC_ASSERT(kExternalStringTag != 0);
2384 STATIC_ASSERT(kSeqStringTag == 0);
2385 __ tst(r1, Operand(kExternalStringTag));
2386 __ b(eq, &sequential_string);
2387
2388 // Handle external string.
2389 // Rule out short external strings.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002390 STATIC_ASSERT(kShortExternalStringTag != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002391 __ tst(r1, Operand(kShortExternalStringTag));
2392 __ b(ne, &runtime);
2393 __ ldr(r5, FieldMemOperand(r5, ExternalString::kResourceDataOffset));
2394 // r5 already points to the first character of underlying string.
2395 __ jmp(&allocate_result);
2396
2397 __ bind(&sequential_string);
2398 // Locate first character of underlying subject string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002399 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
2400 __ add(r5, r5, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002401
2402 __ bind(&allocate_result);
2403 // Sequential one-byte string. Allocate the result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002404 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002405 __ tst(r1, Operand(kStringEncodingMask));
2406 __ b(eq, &two_byte_sequential);
2407
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002408 // Allocate and copy the resulting one-byte string.
2409 __ AllocateOneByteString(r0, r2, r4, r6, r1, &runtime);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002410
2411 // Locate first character of substring to copy.
2412 __ add(r5, r5, r3);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002413 // Locate first character of result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002414 __ add(r1, r0, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002415
Ben Murdoch589d6972011-11-30 16:04:58 +00002416 // r0: result string
2417 // r1: first character of result string
2418 // r2: result string length
2419 // r5: first character of substring to copy
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002420 STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
2421 StringHelper::GenerateCopyCharacters(
2422 masm, r1, r5, r2, r3, String::ONE_BYTE_ENCODING);
Ben Murdoch589d6972011-11-30 16:04:58 +00002423 __ jmp(&return_r0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002424
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002425 // Allocate and copy the resulting two-byte string.
2426 __ bind(&two_byte_sequential);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002427 __ AllocateTwoByteString(r0, r2, r4, r6, r1, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002428
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002429 // Locate first character of substring to copy.
Ben Murdoch589d6972011-11-30 16:04:58 +00002430 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002431 __ add(r5, r5, Operand(r3, LSL, 1));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002432 // Locate first character of result.
2433 __ add(r1, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch589d6972011-11-30 16:04:58 +00002434
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002435 // r0: result string.
2436 // r1: first character of result.
2437 // r2: result length.
Ben Murdoch589d6972011-11-30 16:04:58 +00002438 // r5: first character of substring to copy.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002439 STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002440 StringHelper::GenerateCopyCharacters(
2441 masm, r1, r5, r2, r3, String::TWO_BYTE_ENCODING);
Ben Murdoch589d6972011-11-30 16:04:58 +00002442
2443 __ bind(&return_r0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002444 Counters* counters = isolate()->counters();
Steve Block44f0eee2011-05-26 01:26:41 +01002445 __ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
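  // Remove the three stack arguments (to, from and string) and return.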
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002446 __ Drop(3);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002447 __ Ret();
2448
2449 // Just jump to runtime to create the sub string.
2450 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002451 __ TailCallRuntime(Runtime::kSubString);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002452
2453 __ bind(&single_char);
2454 // r0: original string
2455 // r1: instance type
2456 // r2: length
2457 // r3: from index (untagged)
2458 __ SmiTag(r3, r3);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002459 StringCharAtGenerator generator(r0, r3, r2, r0, &runtime, &runtime, &runtime,
Ben Murdoch61f157c2016-09-16 13:49:30 +01002460 RECEIVER_IS_STRING);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002461 generator.GenerateFast(masm);
2462 __ Drop(3);
2463 __ Ret();
2464 generator.SkipSlow(masm, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002465}
2466
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002467void ToStringStub::Generate(MacroAssembler* masm) {
2468 // The ToString stub takes one argument in r0.
2469 Label is_number;
2470 __ JumpIfSmi(r0, &is_number);
2471
2472 __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE);
2473 // r0: receiver
2474 // r1: receiver instance type
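  // Strings have instance types below FIRST_NONSTRING_TYPE, so return the
  // argument unchanged if it is already a string.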
2475 __ Ret(lo);
2476
2477 Label not_heap_number;
2478 __ cmp(r1, Operand(HEAP_NUMBER_TYPE));
2479 __ b(ne, &not_heap_number);
2480 __ bind(&is_number);
2481 NumberToStringStub stub(isolate());
2482 __ TailCallStub(&stub);
2483 __ bind(&not_heap_number);
2484
2485 Label not_oddball;
2486 __ cmp(r1, Operand(ODDBALL_TYPE));
2487 __ b(ne, &not_oddball);
2488 __ ldr(r0, FieldMemOperand(r0, Oddball::kToStringOffset));
2489 __ Ret();
2490 __ bind(&not_oddball);
2491
2492 __ push(r0); // Push argument.
2493 __ TailCallRuntime(Runtime::kToString);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002494}
2495
2496
Ben Murdoch097c5b22016-05-18 11:27:45 +01002497void ToNameStub::Generate(MacroAssembler* masm) {
2498 // The ToName stub takes one argument in r0.
2499 Label is_number;
2500 __ JumpIfSmi(r0, &is_number);
2501
2502 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
2503 __ CompareObjectType(r0, r1, r1, LAST_NAME_TYPE);
2504 // r0: receiver
2505 // r1: receiver instance type
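  // Names (strings and symbols) have instance types at or below
  // LAST_NAME_TYPE, so return the argument unchanged if it is already a name.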
2506 __ Ret(ls);
2507
2508 Label not_heap_number;
2509 __ cmp(r1, Operand(HEAP_NUMBER_TYPE));
2510 __ b(ne, &not_heap_number);
2511 __ bind(&is_number);
2512 NumberToStringStub stub(isolate());
2513 __ TailCallStub(&stub);
2514 __ bind(&not_heap_number);
2515
2516 Label not_oddball;
2517 __ cmp(r1, Operand(ODDBALL_TYPE));
2518 __ b(ne, &not_oddball);
2519 __ ldr(r0, FieldMemOperand(r0, Oddball::kToStringOffset));
2520 __ Ret();
2521 __ bind(&not_oddball);
2522
2523 __ push(r0); // Push argument.
2524 __ TailCallRuntime(Runtime::kToName);
2525}
2526
2527
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002528void StringHelper::GenerateFlatOneByteStringEquals(
2529 MacroAssembler* masm, Register left, Register right, Register scratch1,
2530 Register scratch2, Register scratch3) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002531 Register length = scratch1;
2532
2533 // Compare lengths.
2534 Label strings_not_equal, check_zero_length;
2535 __ ldr(length, FieldMemOperand(left, String::kLengthOffset));
2536 __ ldr(scratch2, FieldMemOperand(right, String::kLengthOffset));
2537 __ cmp(length, scratch2);
2538 __ b(eq, &check_zero_length);
2539 __ bind(&strings_not_equal);
2540 __ mov(r0, Operand(Smi::FromInt(NOT_EQUAL)));
2541 __ Ret();
2542
2543 // Check if the length is zero.
2544 Label compare_chars;
2545 __ bind(&check_zero_length);
2546 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002547 __ cmp(length, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00002548 __ b(ne, &compare_chars);
2549 __ mov(r0, Operand(Smi::FromInt(EQUAL)));
2550 __ Ret();
2551
2552 // Compare characters.
2553 __ bind(&compare_chars);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002554 GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2, scratch3,
2555 &strings_not_equal);
Ben Murdoch257744e2011-11-30 15:57:28 +00002556
2557 // Characters are equal.
2558 __ mov(r0, Operand(Smi::FromInt(EQUAL)));
2559 __ Ret();
2560}
2561
2562
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002563void StringHelper::GenerateCompareFlatOneByteStrings(
2564 MacroAssembler* masm, Register left, Register right, Register scratch1,
2565 Register scratch2, Register scratch3, Register scratch4) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002566 Label result_not_equal, compare_lengths;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002567 // Find minimum length and length difference.
2568 __ ldr(scratch1, FieldMemOperand(left, String::kLengthOffset));
2569 __ ldr(scratch2, FieldMemOperand(right, String::kLengthOffset));
2570 __ sub(scratch3, scratch1, Operand(scratch2), SetCC);
2571 Register length_delta = scratch3;
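  // Compute the minimum length: if the subtraction above set gt (left is
  // longer), take the right string's length instead.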
2572 __ mov(scratch1, scratch2, LeaveCC, gt);
2573 Register min_length = scratch1;
2574 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002575 __ cmp(min_length, Operand::Zero());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002576 __ b(eq, &compare_lengths);
2577
Ben Murdoch257744e2011-11-30 15:57:28 +00002578 // Compare loop.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002579 GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
2580 scratch4, &result_not_equal);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002581
Ben Murdoch257744e2011-11-30 15:57:28 +00002582 // Compare lengths - strings up to min-length are equal.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002583 __ bind(&compare_lengths);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002584 DCHECK(Smi::FromInt(EQUAL) == static_cast<Smi*>(0));
Ben Murdoch257744e2011-11-30 15:57:28 +00002585 // Use length_delta as result if it's zero.
2586 __ mov(r0, Operand(length_delta), SetCC);
2587 __ bind(&result_not_equal);
2588 // Conditionally update the result based on either length_delta or
2589 // the last comparison performed in the loop above.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002590 __ mov(r0, Operand(Smi::FromInt(GREATER)), LeaveCC, gt);
2591 __ mov(r0, Operand(Smi::FromInt(LESS)), LeaveCC, lt);
2592 __ Ret();
2593}
2594
2595
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002596void StringHelper::GenerateOneByteCharsCompareLoop(
2597 MacroAssembler* masm, Register left, Register right, Register length,
2598 Register scratch1, Register scratch2, Label* chars_not_equal) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002599 // Change index to run from -length to -1 by adding length to string
2600 // start. This means that the loop ends when index reaches zero, which
2601 // doesn't need an additional compare.
2602 __ SmiUntag(length);
2603 __ add(scratch1, length,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002604 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch257744e2011-11-30 15:57:28 +00002605 __ add(left, left, Operand(scratch1));
2606 __ add(right, right, Operand(scratch1));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002607 __ rsb(length, length, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00002608 Register index = length; // index = -length;
2609
2610 // Compare loop.
2611 Label loop;
2612 __ bind(&loop);
2613 __ ldrb(scratch1, MemOperand(left, index));
2614 __ ldrb(scratch2, MemOperand(right, index));
2615 __ cmp(scratch1, scratch2);
2616 __ b(ne, chars_not_equal);
2617 __ add(index, index, Operand(1), SetCC);
2618 __ b(ne, &loop);
2619}
2620
2621
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002622void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
2623 // ----------- S t a t e -------------
2624 // -- r1 : left
2625 // -- r0 : right
2626 // -- lr : return address
2627 // -----------------------------------
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002628
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002629 // Load r2 with the allocation site. We stick an undefined dummy value here
2630 // and replace it with the real allocation site later when we instantiate this
2631 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
Ben Murdoch61f157c2016-09-16 13:49:30 +01002632 __ Move(r2, isolate()->factory()->undefined_value());
Steve Block44f0eee2011-05-26 01:26:41 +01002633
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002634 // Make sure that we actually patched the allocation site.
2635 if (FLAG_debug_code) {
2636 __ tst(r2, Operand(kSmiTagMask));
2637 __ Assert(ne, kExpectedAllocationSite);
2638 __ push(r2);
2639 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
2640 __ LoadRoot(ip, Heap::kAllocationSiteMapRootIndex);
2641 __ cmp(r2, ip);
2642 __ pop(r2);
2643 __ Assert(eq, kExpectedAllocationSite);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002644 }
2645
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002646 // Tail call into the stub that handles binary operations with allocation
2647 // sites.
2648 BinaryOpWithAllocationSiteStub stub(isolate(), state());
2649 __ TailCallStub(&stub);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002650}
2651
2652
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002653void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
2654 DCHECK_EQ(CompareICState::BOOLEAN, state());
2655 Label miss;
2656
2657 __ CheckMap(r1, r2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
2658 __ CheckMap(r0, r3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002659 if (!Token::IsEqualityOp(op())) {
2660 __ ldr(r1, FieldMemOperand(r1, Oddball::kToNumberOffset));
2661 __ AssertSmi(r1);
2662 __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset));
2663 __ AssertSmi(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002664 }
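  // Return the difference: it is zero exactly when the operands are equal,
  // and for ordered comparisons the operands are now smis, so the sign of
  // the difference gives the ordering.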
Ben Murdoch097c5b22016-05-18 11:27:45 +01002665 __ sub(r0, r1, r0);
2666 __ Ret();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002667
2668 __ bind(&miss);
2669 GenerateMiss(masm);
2670}
2671
2672
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002673void CompareICStub::GenerateSmis(MacroAssembler* masm) {
2674 DCHECK(state() == CompareICState::SMI);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002675 Label miss;
2676 __ orr(r2, r1, r0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002677 __ JumpIfNotSmi(r2, &miss);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002678
2679 if (GetCondition() == eq) {
2680 // For equality we do not care about the sign of the result.
2681 __ sub(r0, r0, r1, SetCC);
2682 } else {
Steve Block1e0659c2011-05-24 12:43:12 +01002683 // Untag before subtracting to avoid handling overflow.
2684 __ SmiUntag(r1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002685 __ sub(r0, r1, Operand::SmiUntag(r0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002686 }
2687 __ Ret();
2688
2689 __ bind(&miss);
2690 GenerateMiss(masm);
2691}
2692
2693
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002694void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
2695 DCHECK(state() == CompareICState::NUMBER);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002696
2697 Label generic_stub;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002698 Label unordered, maybe_undefined1, maybe_undefined2;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002699 Label miss;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002700
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002701 if (left() == CompareICState::SMI) {
2702 __ JumpIfNotSmi(r1, &miss);
2703 }
2704 if (right() == CompareICState::SMI) {
2705 __ JumpIfNotSmi(r0, &miss);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002706 }
2707
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002708 // Inlining the double comparison and falling back to the general compare
2709 // stub if NaN is involved.
2710 // Load left and right operand.
2711 Label done, left, left_smi, right_smi;
2712 __ JumpIfSmi(r0, &right_smi);
2713 __ CheckMap(r0, r2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1,
2714 DONT_DO_SMI_CHECK);
2715 __ sub(r2, r0, Operand(kHeapObjectTag));
2716 __ vldr(d1, r2, HeapNumber::kValueOffset);
2717 __ b(&left);
2718 __ bind(&right_smi);
2719 __ SmiToDouble(d1, r0);
2720
2721 __ bind(&left);
2722 __ JumpIfSmi(r1, &left_smi);
2723 __ CheckMap(r1, r2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2,
2724 DONT_DO_SMI_CHECK);
2725 __ sub(r2, r1, Operand(kHeapObjectTag));
2726 __ vldr(d0, r2, HeapNumber::kValueOffset);
2727 __ b(&done);
2728 __ bind(&left_smi);
2729 __ SmiToDouble(d0, r1);
2730
2731 __ bind(&done);
2732 // Compare operands.
2733 __ VFPCompareAndSetFlags(d0, d1);
2734
2735 // Don't base result on status bits when a NaN is involved.
2736 __ b(vs, &unordered);
2737
2738 // Return a result of -1, 0, or 1, based on status bits.
2739 __ mov(r0, Operand(EQUAL), LeaveCC, eq);
2740 __ mov(r0, Operand(LESS), LeaveCC, lt);
2741 __ mov(r0, Operand(GREATER), LeaveCC, gt);
2742 __ Ret();
2743
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002744 __ bind(&unordered);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002745 __ bind(&generic_stub);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002746 CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002747 CompareICState::GENERIC, CompareICState::GENERIC);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002748 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
2749
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002750 __ bind(&maybe_undefined1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002751 if (Token::IsOrderedRelationalCompareOp(op())) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002752 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
2753 __ b(ne, &miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002754 __ JumpIfSmi(r1, &unordered);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002755 __ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE);
2756 __ b(ne, &maybe_undefined2);
2757 __ jmp(&unordered);
2758 }
2759
2760 __ bind(&maybe_undefined2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002761 if (Token::IsOrderedRelationalCompareOp(op())) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002762 __ CompareRoot(r1, Heap::kUndefinedValueRootIndex);
2763 __ b(eq, &unordered);
2764 }
2765
Ben Murdochb0fe1622011-05-05 13:52:32 +01002766 __ bind(&miss);
2767 GenerateMiss(masm);
2768}
2769
2770
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002771void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
2772 DCHECK(state() == CompareICState::INTERNALIZED_STRING);
Ben Murdoch257744e2011-11-30 15:57:28 +00002773 Label miss;
2774
2775 // Registers containing left and right operands respectively.
2776 Register left = r1;
2777 Register right = r0;
2778 Register tmp1 = r2;
2779 Register tmp2 = r3;
2780
2781 // Check that both operands are heap objects.
2782 __ JumpIfEitherSmi(left, right, &miss);
2783
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002784 // Check that both operands are internalized strings.
Ben Murdoch257744e2011-11-30 15:57:28 +00002785 __ ldr(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
2786 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
2787 __ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
2788 __ ldrb(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002789 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
2790 __ orr(tmp1, tmp1, Operand(tmp2));
2791 __ tst(tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask));
2792 __ b(ne, &miss);
Ben Murdoch257744e2011-11-30 15:57:28 +00002793
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002794 // Internalized strings are compared by identity.
Ben Murdoch257744e2011-11-30 15:57:28 +00002795 __ cmp(left, right);
2796 // Make sure r0 is non-zero. At this point input operands are
2797 // guaranteed to be non-zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002798 DCHECK(right.is(r0));
Ben Murdoch257744e2011-11-30 15:57:28 +00002799 STATIC_ASSERT(EQUAL == 0);
2800 STATIC_ASSERT(kSmiTag == 0);
2801 __ mov(r0, Operand(Smi::FromInt(EQUAL)), LeaveCC, eq);
2802 __ Ret();
2803
2804 __ bind(&miss);
2805 GenerateMiss(masm);
2806}
2807
2808
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002809void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
2810 DCHECK(state() == CompareICState::UNIQUE_NAME);
2811 DCHECK(GetCondition() == eq);
Ben Murdoch257744e2011-11-30 15:57:28 +00002812 Label miss;
2813
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002814 // Registers containing left and right operands respectively.
2815 Register left = r1;
2816 Register right = r0;
2817 Register tmp1 = r2;
2818 Register tmp2 = r3;
2819
2820 // Check that both operands are heap objects.
2821 __ JumpIfEitherSmi(left, right, &miss);
2822
2823 // Check that both operands are unique names. This leaves the instance
2824 // types loaded in tmp1 and tmp2.
2825 __ ldr(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
2826 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
2827 __ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
2828 __ ldrb(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
2829
2830 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss);
2831 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss);
2832
2833 // Unique names are compared by identity.
2834 __ cmp(left, right);
2835 // Make sure r0 is non-zero. At this point input operands are
2836 // guaranteed to be non-zero.
2837 DCHECK(right.is(r0));
2838 STATIC_ASSERT(EQUAL == 0);
2839 STATIC_ASSERT(kSmiTag == 0);
2840 __ mov(r0, Operand(Smi::FromInt(EQUAL)), LeaveCC, eq);
2841 __ Ret();
2842
2843 __ bind(&miss);
2844 GenerateMiss(masm);
2845}
2846
2847
2848void CompareICStub::GenerateStrings(MacroAssembler* masm) {
2849 DCHECK(state() == CompareICState::STRING);
2850 Label miss;
2851
2852 bool equality = Token::IsEqualityOp(op());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002853
Ben Murdoch257744e2011-11-30 15:57:28 +00002854 // Registers containing left and right operands respectively.
2855 Register left = r1;
2856 Register right = r0;
2857 Register tmp1 = r2;
2858 Register tmp2 = r3;
2859 Register tmp3 = r4;
2860 Register tmp4 = r5;
2861
2862 // Check that both operands are heap objects.
2863 __ JumpIfEitherSmi(left, right, &miss);
2864
2865 // Check that both operands are strings. This leaves the instance
2866 // types loaded in tmp1 and tmp2.
2867 __ ldr(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
2868 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
2869 __ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
2870 __ ldrb(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
2871 STATIC_ASSERT(kNotStringTag != 0);
2872 __ orr(tmp3, tmp1, tmp2);
2873 __ tst(tmp3, Operand(kIsNotStringMask));
2874 __ b(ne, &miss);
2875
2876 // Fast check for identical strings.
2877 __ cmp(left, right);
2878 STATIC_ASSERT(EQUAL == 0);
2879 STATIC_ASSERT(kSmiTag == 0);
2880 __ mov(r0, Operand(Smi::FromInt(EQUAL)), LeaveCC, eq);
2881 __ Ret(eq);
2882
2883 // Handle not identical strings.
2884
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002885 // Check that both strings are internalized strings. If they are, we're done
2886 // because we already know they are not identical. We know they are both
2887 // strings.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002888 if (equality) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002889 DCHECK(GetCondition() == eq);
2890 STATIC_ASSERT(kInternalizedTag == 0);
2891 __ orr(tmp3, tmp1, Operand(tmp2));
2892 __ tst(tmp3, Operand(kIsNotInternalizedMask));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002893 // Make sure r0 is non-zero. At this point input operands are
2894 // guaranteed to be non-zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002895 DCHECK(right.is(r0));
2896 __ Ret(eq);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002897 }
Ben Murdoch257744e2011-11-30 15:57:28 +00002898
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002899 // Check that both strings are sequential one-byte.
Ben Murdoch257744e2011-11-30 15:57:28 +00002900 Label runtime;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002901 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(tmp1, tmp2, tmp3, tmp4,
2902 &runtime);
Ben Murdoch257744e2011-11-30 15:57:28 +00002903
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002904 // Compare flat one-byte strings. Returns when done.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002905 if (equality) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002906 StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1, tmp2,
2907 tmp3);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002908 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002909 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
2910 tmp2, tmp3, tmp4);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002911 }
Ben Murdoch257744e2011-11-30 15:57:28 +00002912
2913 // Handle more complex cases in runtime.
2914 __ bind(&runtime);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002915 if (equality) {
Ben Murdochda12d292016-06-02 14:46:10 +01002916 {
2917 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2918 __ Push(left, right);
2919 __ CallRuntime(Runtime::kStringEqual);
2920 }
2921 __ LoadRoot(r1, Heap::kTrueValueRootIndex);
2922 __ sub(r0, r0, r1);
2923 __ Ret();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002924 } else {
Ben Murdochda12d292016-06-02 14:46:10 +01002925 __ Push(left, right);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002926 __ TailCallRuntime(Runtime::kStringCompare);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002927 }
Ben Murdoch257744e2011-11-30 15:57:28 +00002928
2929 __ bind(&miss);
2930 GenerateMiss(masm);
2931}
2932
2933
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002934void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
2935 DCHECK_EQ(CompareICState::RECEIVER, state());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002936 Label miss;
2937 __ and_(r2, r1, Operand(r0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002938 __ JumpIfSmi(r2, &miss);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002939
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002940 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
2941 __ CompareObjectType(r0, r2, r2, FIRST_JS_RECEIVER_TYPE);
2942 __ b(lt, &miss);
2943 __ CompareObjectType(r1, r2, r2, FIRST_JS_RECEIVER_TYPE);
2944 __ b(lt, &miss);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002945
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002946 DCHECK(GetCondition() == eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002947 __ sub(r0, r0, Operand(r1));
2948 __ Ret();
2949
2950 __ bind(&miss);
2951 GenerateMiss(masm);
2952}
2953
2954
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002955void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002956 Label miss;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002957 Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002958 __ and_(r2, r1, Operand(r0));
2959 __ JumpIfSmi(r2, &miss);
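  // Load the known map from the weak cell and check that both operands have
  // exactly that map.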
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002960 __ GetWeakValue(r4, cell);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002961 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
2962 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002963 __ cmp(r2, r4);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002964 __ b(ne, &miss);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002965 __ cmp(r3, r4);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002966 __ b(ne, &miss);
Ben Murdochc7cc0282012-03-05 14:35:55 +00002967
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002968 if (Token::IsEqualityOp(op())) {
2969 __ sub(r0, r0, Operand(r1));
2970 __ Ret();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002971 } else {
2972 if (op() == Token::LT || op() == Token::LTE) {
2973 __ mov(r2, Operand(Smi::FromInt(GREATER)));
2974 } else {
2975 __ mov(r2, Operand(Smi::FromInt(LESS)));
2976 }
2977 __ Push(r1, r0, r2);
2978 __ TailCallRuntime(Runtime::kCompare);
2979 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002980
2981 __ bind(&miss);
2982 GenerateMiss(masm);
2983}
2984
2985
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002986void CompareICStub::GenerateMiss(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002987 {
2988 // Call the runtime system in a fresh internal frame.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002989 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002990 __ Push(r1, r0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002991 __ Push(lr, r1, r0);
2992 __ mov(ip, Operand(Smi::FromInt(op())));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002993 __ push(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002994 __ CallRuntime(Runtime::kCompareIC_Miss);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002995 // Compute the entry point of the rewritten stub.
2996 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
2997 // Restore registers.
2998 __ pop(lr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002999 __ Pop(r1, r0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003000 }
3001
Ben Murdochb0fe1622011-05-05 13:52:32 +01003002 __ Jump(r2);
3003}
3004
3005
Steve Block1e0659c2011-05-24 12:43:12 +01003006void DirectCEntryStub::Generate(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003007 // Place the return address on the stack, making the call
3008 // GC safe. The RegExp backend also relies on this.
3009 __ str(lr, MemOperand(sp, 0));
3010 __ blx(ip); // Call the C++ function.
Steve Block1e0659c2011-05-24 12:43:12 +01003011 __ ldr(pc, MemOperand(sp, 0));
3012}
3013
3014
3015void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003016 Register target) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003017 intptr_t code =
3018 reinterpret_cast<intptr_t>(GetCode().location());
3019 __ Move(ip, target);
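  // Branch to the DirectCEntry stub above; it calls the target through ip and
  // keeps the return address on the stack so it stays valid across GC.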
3020 __ mov(lr, Operand(code, RelocInfo::CODE_TARGET));
3021 __ blx(lr); // Call the stub.
Steve Block1e0659c2011-05-24 12:43:12 +01003022}
3023
3024
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003025void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
3026 Label* miss,
3027 Label* done,
3028 Register receiver,
3029 Register properties,
3030 Handle<Name> name,
3031 Register scratch0) {
3032 DCHECK(name->IsUniqueName());
Ben Murdoch257744e2011-11-30 15:57:28 +00003033 // If the names of the slots in the range 1 to kProbes - 1 for the hash value
3034 // are not equal to the given name, and the kProbes-th slot is unused (its
3035 // name is the undefined value), then the hash table is guaranteed not to
3036 // contain the property. This holds even if some slots hold deleted
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003037 // properties (their names are the hole value).
Ben Murdoch257744e2011-11-30 15:57:28 +00003038 for (int i = 0; i < kInlinedProbes; i++) {
3039 // scratch0 points to properties hash.
3040 // Compute the masked index: (hash + i + i * i) & mask.
3041 Register index = scratch0;
3042 // Capacity is smi 2^n.
3043 __ ldr(index, FieldMemOperand(properties, kCapacityOffset));
3044 __ sub(index, index, Operand(1));
3045 __ and_(index, index, Operand(
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003046 Smi::FromInt(name->Hash() + NameDictionary::GetProbeOffset(i))));
Ben Murdoch257744e2011-11-30 15:57:28 +00003047
3048 // Scale the index by multiplying by the entry size.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003049 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
Ben Murdoch257744e2011-11-30 15:57:28 +00003050 __ add(index, index, Operand(index, LSL, 1)); // index *= 3.
3051
3052 Register entity_name = scratch0;
3053 // Having undefined at this place means the name is not contained.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003054 STATIC_ASSERT(kSmiTagSize == 1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003055 Register tmp = properties;
3056 __ add(tmp, properties, Operand(index, LSL, 1));
3057 __ ldr(entity_name, FieldMemOperand(tmp, kElementsStartOffset));
3058
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003059 DCHECK(!tmp.is(entity_name));
Ben Murdoch257744e2011-11-30 15:57:28 +00003060 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
3061 __ cmp(entity_name, tmp);
3062 __ b(eq, done);
3063
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003064 // Load the hole ready for use below:
3065 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003066
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003067 // Stop if found the property.
3068 __ cmp(entity_name, Operand(Handle<Name>(name)));
3069 __ b(eq, miss);
Ben Murdoch257744e2011-11-30 15:57:28 +00003070
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003071 Label good;
3072 __ cmp(entity_name, tmp);
3073 __ b(eq, &good);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003074
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003075 // Check if the entry name is not a unique name.
3076 __ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
3077 __ ldrb(entity_name,
3078 FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
3079 __ JumpIfNotUniqueNameInstanceType(entity_name, miss);
3080 __ bind(&good);
Ben Murdoch257744e2011-11-30 15:57:28 +00003081
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003082 // Restore the properties.
3083 __ ldr(properties,
3084 FieldMemOperand(receiver, JSObject::kPropertiesOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003085 }
3086
3087 const int spill_mask =
3088 (lr.bit() | r6.bit() | r5.bit() | r4.bit() | r3.bit() |
3089 r2.bit() | r1.bit() | r0.bit());
3090
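  // Save the registers that are live across the dictionary lookup stub call
  // below (lr included, since CallStub clobbers it).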
3091 __ stm(db_w, sp, spill_mask);
3092 __ ldr(r0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003093 __ mov(r1, Operand(Handle<Name>(name)));
3094 NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003095 __ CallStub(&stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003096 __ cmp(r0, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00003097 __ ldm(ia_w, sp, spill_mask);
3098
3099 __ b(eq, done);
3100 __ b(ne, miss);
Ben Murdoch257744e2011-11-30 15:57:28 +00003101}
3102
3103
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003104// Probe the name dictionary in the |elements| register. Jump to the
Ben Murdoch257744e2011-11-30 15:57:28 +00003105// |done| label if a property with the given name is found. Jump to
3106// the |miss| label otherwise.
3107// If lookup was successful |scratch2| will be equal to elements + 4 * index.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003108void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
3109 Label* miss,
3110 Label* done,
3111 Register elements,
3112 Register name,
3113 Register scratch1,
3114 Register scratch2) {
3115 DCHECK(!elements.is(scratch1));
3116 DCHECK(!elements.is(scratch2));
3117 DCHECK(!name.is(scratch1));
3118 DCHECK(!name.is(scratch2));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003119
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003120 __ AssertName(name);
Ben Murdoch257744e2011-11-30 15:57:28 +00003121
3122 // Compute the capacity mask.
3123 __ ldr(scratch1, FieldMemOperand(elements, kCapacityOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003124 __ SmiUntag(scratch1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003125 __ sub(scratch1, scratch1, Operand(1));
3126
3127 // Generate an unrolled loop that performs a few probes before
3128 // giving up. Measurements done on Gmail indicate that 2 probes
3129 // cover ~93% of loads from dictionaries.
3130 for (int i = 0; i < kInlinedProbes; i++) {
3131 // Compute the masked index: (hash + i + i * i) & mask.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003132 __ ldr(scratch2, FieldMemOperand(name, Name::kHashFieldOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003133 if (i > 0) {
3134 // Add the probe offset (i + i * i) left shifted to avoid right shifting
3135 // the hash in a separate instruction. The value hash + i + i * i is right
3136 // shifted in the following and instruction.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003137 DCHECK(NameDictionary::GetProbeOffset(i) <
3138 1 << (32 - Name::kHashFieldOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003139 __ add(scratch2, scratch2, Operand(
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003140 NameDictionary::GetProbeOffset(i) << Name::kHashShift));
Ben Murdoch257744e2011-11-30 15:57:28 +00003141 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003142 __ and_(scratch2, scratch1, Operand(scratch2, LSR, Name::kHashShift));
Ben Murdoch257744e2011-11-30 15:57:28 +00003143
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003144 // Scale the index by multiplying by the entry size.
3145 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
Ben Murdoch257744e2011-11-30 15:57:28 +00003146 // scratch2 = scratch2 * 3.
3147 __ add(scratch2, scratch2, Operand(scratch2, LSL, 1));
3148
3149 // Check if the key is identical to the name.
3150 __ add(scratch2, elements, Operand(scratch2, LSL, 2));
3151 __ ldr(ip, FieldMemOperand(scratch2, kElementsStartOffset));
3152 __ cmp(name, Operand(ip));
3153 __ b(eq, done);
3154 }
3155
3156 const int spill_mask =
3157 (lr.bit() | r6.bit() | r5.bit() | r4.bit() |
3158 r3.bit() | r2.bit() | r1.bit() | r0.bit()) &
3159 ~(scratch1.bit() | scratch2.bit());
3160
3161 __ stm(db_w, sp, spill_mask);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003162 if (name.is(r0)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003163 DCHECK(!elements.is(r1));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003164 __ Move(r1, name);
3165 __ Move(r0, elements);
3166 } else {
3167 __ Move(r0, elements);
3168 __ Move(r1, name);
3169 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003170 NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP);
Ben Murdoch257744e2011-11-30 15:57:28 +00003171 __ CallStub(&stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003172 __ cmp(r0, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00003173 __ mov(scratch2, Operand(r2));
3174 __ ldm(ia_w, sp, spill_mask);
3175
3176 __ b(ne, done);
3177 __ b(eq, miss);
3178}
3179
3180
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003181void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003182 // This stub overrides SometimesSetsUpAFrame() to return false. That means
3183 // we cannot call anything that could cause a GC from this stub.
Ben Murdoch257744e2011-11-30 15:57:28 +00003184 // Registers:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003185 // result: NameDictionary to probe
Ben Murdoch257744e2011-11-30 15:57:28 +00003186 // r1: key
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003187 // dictionary: NameDictionary to probe.
3188 // index: will hold an index of entry if lookup is successful.
3189 // might alias with result_.
Ben Murdoch257744e2011-11-30 15:57:28 +00003190 // Returns:
3191 // result_ is zero if lookup failed, non-zero otherwise.
3192
3193 Register result = r0;
3194 Register dictionary = r0;
3195 Register key = r1;
3196 Register index = r2;
3197 Register mask = r3;
3198 Register hash = r4;
3199 Register undefined = r5;
3200 Register entry_key = r6;
3201
3202 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
3203
3204 __ ldr(mask, FieldMemOperand(dictionary, kCapacityOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003205 __ SmiUntag(mask);
Ben Murdoch257744e2011-11-30 15:57:28 +00003206 __ sub(mask, mask, Operand(1));
3207
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003208 __ ldr(hash, FieldMemOperand(key, Name::kHashFieldOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003209
3210 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
3211
3212 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
3213 // Compute the masked index: (hash + i + i * i) & mask.
3214 // Capacity is smi 2^n.
3215 if (i > 0) {
3216 // Add the probe offset (i + i * i) left shifted to avoid right shifting
3217 // the hash in a separate instruction. The value hash + i + i * i is right
3218 // shifted in the following and instruction.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003219 DCHECK(NameDictionary::GetProbeOffset(i) <
3220 1 << (32 - Name::kHashFieldOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003221 __ add(index, hash, Operand(
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003222 NameDictionary::GetProbeOffset(i) << Name::kHashShift));
Ben Murdoch257744e2011-11-30 15:57:28 +00003223 } else {
3224 __ mov(index, Operand(hash));
3225 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003226 __ and_(index, mask, Operand(index, LSR, Name::kHashShift));
Ben Murdoch257744e2011-11-30 15:57:28 +00003227
3228 // Scale the index by multiplying by the entry size.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003229 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
Ben Murdoch257744e2011-11-30 15:57:28 +00003230 __ add(index, index, Operand(index, LSL, 1)); // index *= 3.
3231
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003232 STATIC_ASSERT(kSmiTagSize == 1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003233 __ add(index, dictionary, Operand(index, LSL, 2));
3234 __ ldr(entry_key, FieldMemOperand(index, kElementsStartOffset));
3235
3236 // Having undefined at this place means the name is not contained.
3237 __ cmp(entry_key, Operand(undefined));
3238 __ b(eq, &not_in_dictionary);
3239
3240 // Stop if found the property.
3241 __ cmp(entry_key, Operand(key));
3242 __ b(eq, &in_dictionary);
3243
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003244 if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
3245 // Check if the entry name is not a unique name.
Ben Murdoch257744e2011-11-30 15:57:28 +00003246 __ ldr(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset));
3247 __ ldrb(entry_key,
3248 FieldMemOperand(entry_key, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003249 __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary);
Ben Murdoch257744e2011-11-30 15:57:28 +00003250 }
3251 }
3252
3253 __ bind(&maybe_in_dictionary);
3254 // If we are doing a negative lookup, then probing failure should be
3255 // treated as a lookup success. For a positive lookup, probing failure
3256 // should be treated as a lookup failure.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003257 if (mode() == POSITIVE_LOOKUP) {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003258 __ mov(result, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00003259 __ Ret();
3260 }
3261
3262 __ bind(&in_dictionary);
3263 __ mov(result, Operand(1));
3264 __ Ret();
3265
3266 __ bind(&not_in_dictionary);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003267 __ mov(result, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00003268 __ Ret();
3269}
3270
3271
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
  stub1.GetCode();
  // Hydrogen code stubs need stub2 at snapshot time.
  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
  stub2.GetCode();
}


// Takes the input in 3 registers: address_, value_ and object_.  A pointer to
// the value has just been written into the object, and now this stub makes
// sure we keep the GC informed.  The word in the object where the value has
// been written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call.  We patch it back and
  // forth between a compare instruction (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  // See RecordWriteStub::Patch for details.
  {
    // Block literal pool emission, as the position of these two instructions
    // is assumed by the patching code.
    Assembler::BlockConstPoolScope block_const_pool(masm);
    __ b(&skip_to_incremental_noncompacting);
    __ b(&skip_to_incremental_compacting);
  }

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  }
  __ Ret();

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  DCHECK(Assembler::GetBranchOffset(masm->instr_at(0)) < (1 << 12));
  DCHECK(Assembler::GetBranchOffset(masm->instr_at(4)) < (1 << 12));
  PatchBranchIntoNop(masm, 0);
  PatchBranchIntoNop(masm, Assembler::kInstrSize);
}


void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),  // Value.
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
                        &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ Ret();
}


void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
  int argument_count = 3;
  __ PrepareCallCFunction(argument_count, regs_.scratch0());
  Register address =
      r0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
  DCHECK(!address.is(regs_.object()));
  DCHECK(!address.is(r0));
  __ Move(address, regs_.address());
  __ Move(r0, regs_.object());
  __ Move(r1, address);
  __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(isolate()),
      argument_count);
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
}


void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label on_black;
  Label need_incremental;
  Label need_incremental_pop_scratch;

  __ and_(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask));
  __ ldr(regs_.scratch1(),
         MemOperand(regs_.scratch0(),
                    MemoryChunk::kWriteBarrierCounterOffset));
  __ sub(regs_.scratch1(), regs_.scratch1(), Operand(1), SetCC);
  __ str(regs_.scratch1(),
         MemOperand(regs_.scratch0(),
                    MemoryChunk::kWriteBarrierCounterOffset));
  __ b(mi, &need_incremental);

  // Let's look at the color of the object: If it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ Ret();
  }

  __ bind(&on_black);

  // Get the value from the slot.
  __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     eq,
                     &ensure_not_white);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     eq,
                     &need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need extra registers for this, so we push the object and the address
  // register temporarily.
  __ Push(regs_.object(), regs_.address());
  __ JumpIfWhite(regs_.scratch0(),  // The value.
                 regs_.scratch1(),  // Scratch.
                 regs_.object(),    // Scratch.
                 regs_.address(),   // Scratch.
                 &need_incremental_pop_scratch);
  __ Pop(regs_.object(), regs_.address());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ Ret();
  }

  __ bind(&need_incremental_pop_scratch);
  __ Pop(regs_.object(), regs_.address());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}

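// Summary of the code below: the trampoline calls into the runtime via
// CEntryStub (saving FP registers), then reads the argument count that the
// failed stub recorded in the caller frame, leaves the
// STUB_FAILURE_TRAMPOLINE frame and drops that many arguments (plus the
// receiver in JS_FUNCTION_STUB_MODE) before returning.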
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(isolate(), 1, kSaveFPRegs);
  __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
  __ ldr(r1, MemOperand(fp, parameter_count_offset));
  if (function_mode() == JS_FUNCTION_STUB_MODE) {
    __ add(r1, r1, Operand(1));
  }
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ mov(r1, Operand(r1, LSL, kPointerSizeLog2));
  __ add(sp, sp, r1);
  __ Ret();
}


void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  LoadICStub stub(isolate());
  stub.GenerateForTrampoline(masm);
}


void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
  KeyedLoadICStub stub(isolate());
  stub.GenerateForTrampoline(masm);
}


void CallICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(r2);
  CallICStub stub(isolate(), state());
  __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }


void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}

static void HandleArrayCases(MacroAssembler* masm, Register feedback,
                             Register receiver_map, Register scratch1,
                             Register scratch2, bool is_polymorphic,
                             Label* miss) {
  // feedback initially contains the feedback array
  Label next_loop, prepare_next;
  Label start_polymorphic;

  Register cached_map = scratch1;

  __ ldr(cached_map,
         FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(0)));
  __ ldr(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
  __ cmp(receiver_map, cached_map);
  __ b(ne, &start_polymorphic);
  // Found; now call the handler.
  Register handler = feedback;
  __ ldr(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1)));
  __ add(pc, handler, Operand(Code::kHeaderSize - kHeapObjectTag));


  Register length = scratch2;
  __ bind(&start_polymorphic);
  __ ldr(length, FieldMemOperand(feedback, FixedArray::kLengthOffset));
  if (!is_polymorphic) {
    // If the IC could be monomorphic we have to make sure we don't go past the
    // end of the feedback array.
    __ cmp(length, Operand(Smi::FromInt(2)));
    __ b(eq, miss);
  }

  Register too_far = length;
  Register pointer_reg = feedback;

  //   +-----+------+------+-----+-----+ ... ----+
  //   | map | len  | wm0  | h0  | wm1 |      hN |
  //   +-----+------+------+-----+-----+ ... ----+
  //      0     1      2                    len-1
  //                    ^                       ^
  //                    |                       |
  //               pointer_reg               too_far
  //               aka feedback              scratch2
  // also need receiver_map
  // use cached_map (scratch1) to look in the weak map values.
  __ add(too_far, feedback, Operand::PointerOffsetFromSmiKey(length));
  __ add(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(pointer_reg, feedback,
         Operand(FixedArray::OffsetOfElementAt(2) - kHeapObjectTag));

  __ bind(&next_loop);
  __ ldr(cached_map, MemOperand(pointer_reg));
  __ ldr(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
  __ cmp(receiver_map, cached_map);
  __ b(ne, &prepare_next);
  __ ldr(handler, MemOperand(pointer_reg, kPointerSize));
  __ add(pc, handler, Operand(Code::kHeaderSize - kHeapObjectTag));

  __ bind(&prepare_next);
  __ add(pointer_reg, pointer_reg, Operand(kPointerSize * 2));
  __ cmp(pointer_reg, too_far);
  __ b(lt, &next_loop);

  // We exhausted our array of map handler pairs.
  __ jmp(miss);
}

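// Probes the monomorphic case: if the receiver is not a Smi and the weak cell
// in |feedback| still holds the receiver's map, the handler stored in the
// following vector slot is tail-called; otherwise control continues at
// |try_array| (or |load_smi_map| for Smi receivers).  This is a summary of the
// code below.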
static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
                                  Register receiver_map, Register feedback,
                                  Register vector, Register slot,
                                  Register scratch, Label* compare_map,
                                  Label* load_smi_map, Label* try_array) {
  __ JumpIfSmi(receiver, load_smi_map);
  __ ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ bind(compare_map);
  Register cached_map = scratch;
  // Move the weak map into the weak_cell register.
  __ ldr(cached_map, FieldMemOperand(feedback, WeakCell::kValueOffset));
  __ cmp(cached_map, receiver_map);
  __ b(ne, try_array);
  Register handler = feedback;
  __ add(handler, vector, Operand::PointerOffsetFromSmiKey(slot));
  __ ldr(handler,
         FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize));
  __ add(pc, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
}

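// Dispatch order in the code below: the monomorphic (weak cell) case first,
// then a polymorphic FixedArray of (weak map, handler) pairs, then the
// megamorphic stub cache probe, and finally the miss handler.  The register
// assignments follow the LoadWithVectorDescriptor comments noted inline.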
void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // r1
  Register name = LoadWithVectorDescriptor::NameRegister();          // r2
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // r3
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // r0
  Register feedback = r4;
  Register receiver_map = r5;
  Register scratch1 = r6;

  __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
  __ ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
                        scratch1, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
  __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex);
  __ b(ne, &not_array);
  HandleArrayCases(masm, feedback, receiver_map, scratch1, r9, true, &miss);

  __ bind(&not_array);
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ b(ne, &miss);
  Code::Flags code_flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags,
                                               receiver, name, feedback,
                                               receiver_map, scratch1, r9);

  __ bind(&miss);
  LoadIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}


void KeyedLoadICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}

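// Like LoadICStub::GenerateImpl above, but with an extra path: when the
// feedback is neither an array nor the megamorphic sentinel, it may be a Name
// recorded for a keyed access; if it matches the key, the following vector
// slot holds another map/handler array to search.  (Summary of the code
// below.)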
void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // r1
  Register key = LoadWithVectorDescriptor::NameRegister();           // r2
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // r3
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // r0
  Register feedback = r4;
  Register receiver_map = r5;
  Register scratch1 = r6;

  __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
  __ ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
                        scratch1, &compare_map, &load_smi_map, &try_array);

  __ bind(&try_array);
  // Is it a fixed array?
  __ ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
  __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex);
  __ b(ne, &not_array);

  // We have a polymorphic element handler.
  Label polymorphic, try_poly_name;
  __ bind(&polymorphic);
  HandleArrayCases(masm, feedback, receiver_map, scratch1, r9, true, &miss);

  __ bind(&not_array);
  // Is it generic?
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ b(ne, &try_poly_name);
  Handle<Code> megamorphic_stub =
      KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ cmp(key, feedback);
  __ b(ne, &miss);
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
  __ ldr(feedback,
         FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize));
  HandleArrayCases(masm, feedback, receiver_map, scratch1, r9, false, &miss);

  __ bind(&miss);
  KeyedLoadIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}

void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
  VectorStoreICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
  VectorKeyedStoreICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


void VectorStoreICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}

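// The store IC dispatch below mirrors the load IC dispatch above.  Note that
// r8 is pressed into service as a second scratch register, which is only
// legal because the embedded constant pool is disabled in this configuration
// (see the DCHECK on FLAG_enable_embedded_constant_pool in the body).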
void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // r1
  Register key = VectorStoreICDescriptor::NameRegister();           // r2
  Register vector = VectorStoreICDescriptor::VectorRegister();      // r3
  Register slot = VectorStoreICDescriptor::SlotRegister();          // r4
  DCHECK(VectorStoreICDescriptor::ValueRegister().is(r0));          // r0
  Register feedback = r5;
  Register receiver_map = r6;
  Register scratch1 = r9;

  __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
  __ ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
                        scratch1, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
  __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex);
  __ b(ne, &not_array);

  // We are using register r8, which is used for the embedded constant pool
  // when FLAG_enable_embedded_constant_pool is true.
  DCHECK(!FLAG_enable_embedded_constant_pool);
  Register scratch2 = r8;
  HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, true,
                   &miss);

  __ bind(&not_array);
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ b(ne, &miss);
  Code::Flags code_flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, Code::STORE_IC, code_flags, receiver, key, feedback, receiver_map,
      scratch1, scratch2);

  __ bind(&miss);
  StoreIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}


void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}

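// Walks polymorphic keyed-store feedback.  Each entry in the array appears to
// be a (weak map, transition-or-undefined, handler) triple: when the
// transition slot is undefined the handler is jumped to directly, otherwise
// the transition map is unpacked from its weak cell into the descriptor's map
// register before jumping to the handler.  (Summary inferred from the code
// below.)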
static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback,
                                       Register receiver_map,
                                       Register scratch1, Register scratch2,
                                       Label* miss) {
  // feedback initially contains the feedback array
  Label next_loop, prepare_next;
  Label start_polymorphic;
  Label transition_call;

  Register cached_map = scratch1;
  Register too_far = scratch2;
  Register pointer_reg = feedback;
  __ ldr(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset));

  //   +-----+------+------+-----+-----+-----+ ... ----+
  //   | map | len  | wm0  | wt0 | h0  | wm1 |      hN |
  //   +-----+------+------+-----+-----+-----+ ... ----+
  //      0     1      2                          len-1
  //                    ^                            ^
  //                    |                            |
  //               pointer_reg                    too_far
  //               aka feedback                   scratch2
  // also need receiver_map
  // use cached_map (scratch1) to look in the weak map values.
  __ add(too_far, feedback, Operand::PointerOffsetFromSmiKey(too_far));
  __ add(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(pointer_reg, feedback,
         Operand(FixedArray::OffsetOfElementAt(0) - kHeapObjectTag));

  __ bind(&next_loop);
  __ ldr(cached_map, MemOperand(pointer_reg));
  __ ldr(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
  __ cmp(receiver_map, cached_map);
  __ b(ne, &prepare_next);
  // Is it a transitioning store?
  __ ldr(too_far, MemOperand(pointer_reg, kPointerSize));
  __ CompareRoot(too_far, Heap::kUndefinedValueRootIndex);
  __ b(ne, &transition_call);
  __ ldr(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2));
  __ add(pc, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag));

  __ bind(&transition_call);
  __ ldr(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset));
  __ JumpIfSmi(too_far, miss);

  __ ldr(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));

  // Load the map into the correct register.
  DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister()));
  __ mov(feedback, too_far);

  __ add(pc, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag));

  __ bind(&prepare_next);
  __ add(pointer_reg, pointer_reg, Operand(kPointerSize * 3));
  __ cmp(pointer_reg, too_far);
  __ b(lt, &next_loop);

  // We exhausted our array of map handler pairs.
  __ jmp(miss);
}

void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm,
                                          bool in_frame) {
  Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // r1
  Register key = VectorStoreICDescriptor::NameRegister();           // r2
  Register vector = VectorStoreICDescriptor::VectorRegister();      // r3
  Register slot = VectorStoreICDescriptor::SlotRegister();          // r4
  DCHECK(VectorStoreICDescriptor::ValueRegister().is(r0));          // r0
  Register feedback = r5;
  Register receiver_map = r6;
  Register scratch1 = r9;

  __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
  __ ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
                        scratch1, &compare_map, &load_smi_map, &try_array);

  __ bind(&try_array);
  // Is it a fixed array?
  __ ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
  __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex);
  __ b(ne, &not_array);

  // We have a polymorphic element handler.
  Label polymorphic, try_poly_name;
  __ bind(&polymorphic);

  // We are using register r8, which is used for the embedded constant pool
  // when FLAG_enable_embedded_constant_pool is true.
  DCHECK(!FLAG_enable_embedded_constant_pool);
  Register scratch2 = r8;

  HandlePolymorphicStoreCase(masm, feedback, receiver_map, scratch1, scratch2,
                             &miss);

  __ bind(&not_array);
  // Is it generic?
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ b(ne, &try_poly_name);
  Handle<Code> megamorphic_stub =
      KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ cmp(key, feedback);
  __ b(ne, &miss);
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
  __ ldr(feedback,
         FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize));
  HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, false,
                   &miss);

  __ bind(&miss);
  KeyedStoreIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}

void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    ProfileEntryHookStub stub(masm->isolate());
    PredictableCodeSizeScope predictable(masm);
    predictable.ExpectSize(masm->CallStubSize(&stub) +
                           2 * Assembler::kInstrSize);
    __ push(lr);
    __ CallStub(&stub);
    __ pop(lr);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // The entry hook is a "push lr" instruction, followed by a call.
  const int32_t kReturnAddressDistanceFromFunctionStart =
      3 * Assembler::kInstrSize;

  // This should contain all kCallerSaved registers.
  const RegList kSavedRegs =
      1 << 0 |  // r0
      1 << 1 |  // r1
      1 << 2 |  // r2
      1 << 3 |  // r3
      1 << 5 |  // r5
      1 << 9;   // r9
  // We also save lr, so the count here is one higher than the mask indicates.
  const int32_t kNumSavedRegs = 7;

  DCHECK((kCallerSaved & kSavedRegs) == kCallerSaved);

  // Save all caller-save registers as this may be called from anywhere.
  __ stm(db_w, sp, kSavedRegs | lr.bit());

  // Compute the function's address for the first argument.
  __ sub(r0, lr, Operand(kReturnAddressDistanceFromFunctionStart));

  // The caller's return address is above the saved temporaries.
  // Grab that for the second argument to the hook.
  __ add(r1, sp, Operand(kNumSavedRegs * kPointerSize));

  // Align the stack if necessary.
  int frame_alignment = masm->ActivationFrameAlignment();
  if (frame_alignment > kPointerSize) {
    __ mov(r5, sp);
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    __ and_(sp, sp, Operand(-frame_alignment));
  }

#if V8_HOST_ARCH_ARM
  int32_t entry_hook =
      reinterpret_cast<int32_t>(isolate()->function_entry_hook());
  __ mov(ip, Operand(entry_hook));
#else
  // Under the simulator we need to indirect the entry hook through a
  // trampoline function at a known address.
  // It additionally takes an isolate as a third parameter.
  __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));

  ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
  __ mov(ip, Operand(ExternalReference(&dispatcher,
                                       ExternalReference::BUILTIN_CALL,
                                       isolate())));
#endif
  __ Call(ip);

  // Restore the stack pointer if needed.
  if (frame_alignment > kPointerSize) {
    __ mov(sp, r5);
  }

  // Also pop pc to get Ret(0).
  __ ldm(ia_w, sp, kSavedRegs | pc.bit());
}

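// The two dispatch helpers below select an array constructor stub from the
// elements kind in r3: in the DONT_OVERRIDE case they compare r3 against each
// fast elements kind in sequence and tail-call the matching stub, aborting if
// nothing matches; with DISABLE_ALLOCATION_SITES they tail-call a single
// pre-selected stub.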
template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(r3, Operand(kind));
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub, eq);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}

static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // r2 - allocation site (if mode != DISABLE_ALLOCATION_SITES)
  // r3 - kind (if mode != DISABLE_ALLOCATION_SITES)
  // r0 - number of arguments
  // r1 - constructor?
  // sp[0] - last argument
  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
    STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
    STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);

    // Is the low bit set? If so, we are holey and that is good.
    __ tst(r3, Operand(1));
    __ b(ne, &normal_sequence);
  }

  // Look at the first argument.
  __ ldr(r5, MemOperand(sp, 0));
  __ cmp(r5, Operand::Zero());
  __ b(eq, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
                                                  holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(masm->isolate(),
                                            initial,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry (only if we have an allocation site in the slot).
    __ add(r3, r3, Operand(1));

    if (FLAG_debug_code) {
      __ ldr(r5, FieldMemOperand(r2, 0));
      __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
      __ Assert(eq, kExpectedAllocationSite);
    }

    // Save the resulting elements kind in type info. We can't just store r3
    // in the AllocationSite::transition_info field because elements kind is
    // restricted to a portion of the field; the upper bits need to be left
    // alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ ldr(r4, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset));
    __ add(r4, r4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
    __ str(r4, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset));

    __ bind(&normal_sequence);
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(r3, Operand(kind));
      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
      __ TailCallStub(&stub, eq);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}

template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  int to_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(isolate, kind);
    stub.GetCode();
    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
      stub1.GetCode();
    }
  }
}

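// Pre-generates the Array constructor stubs for every fast elements kind, plus
// the internal-array variants for FAST_ELEMENTS and FAST_HOLEY_ELEMENTS, so
// that the code already exists (for example in the snapshot) before it is
// first needed.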
void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayNArgumentsConstructorStub stub(isolate);
  stub.GetCode();
  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things.
    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
    stubh1.GetCode();
    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
    stubh2.GetCode();
  }
}

void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm,
    AllocationSiteOverrideMode mode) {
  if (argument_count() == ANY) {
    Label not_zero_case, not_one_case;
    __ tst(r0, r0);
    __ b(ne, &not_zero_case);
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

    __ bind(&not_zero_case);
    __ cmp(r0, Operand(1));
    __ b(gt, &not_one_case);
    CreateArrayDispatchOneArgument(masm, mode);

    __ bind(&not_one_case);
    ArrayNArgumentsConstructorStub stub(masm->isolate());
    __ TailCallStub(&stub);
  } else if (argument_count() == NONE) {
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
  } else if (argument_count() == ONE) {
    CreateArrayDispatchOneArgument(masm, mode);
  } else if (argument_count() == MORE_THAN_ONE) {
    ArrayNArgumentsConstructorStub stub(masm->isolate());
    __ TailCallStub(&stub);
  } else {
    UNREACHABLE();
  }
}

void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : argc (only if argument_count() == ANY)
  //  -- r1 : constructor
  //  -- r2 : AllocationSite or undefined
  //  -- r3 : new target
  //  -- sp[0] : return address
  //  -- sp[4] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ ldr(r4, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    // This test will catch both a NULL and a Smi.
    __ tst(r4, Operand(kSmiTagMask));
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(r4, r4, r5, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in r2 or a valid AllocationSite.
    __ AssertUndefinedOrAllocationSite(r2, r4);
  }

  // Enter the context of the Array function.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  Label subclassing;
  __ cmp(r3, r1);
  __ b(ne, &subclassing);

  Label no_info;
  // Get the elements kind and case on that.
  __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
  __ b(eq, &no_info);

  __ ldr(r3, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset));
  __ SmiUntag(r3);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ and_(r3, r3, Operand(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);

  __ bind(&subclassing);
  switch (argument_count()) {
    case ANY:
    case MORE_THAN_ONE:
      __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
      __ add(r0, r0, Operand(3));
      break;
    case NONE:
      __ str(r1, MemOperand(sp, 0 * kPointerSize));
      __ mov(r0, Operand(3));
      break;
    case ONE:
      __ str(r1, MemOperand(sp, 1 * kPointerSize));
      __ mov(r0, Operand(4));
      break;
  }
  __ Push(r3, r2);
  __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
}

void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  __ cmp(r0, Operand(1));

  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0, lo);

  ArrayNArgumentsConstructorStub stubN(isolate());
  __ TailCallStub(&stubN, hi);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array;
    // look at the first argument.
    __ ldr(r3, MemOperand(sp, 0));
    __ cmp(r3, Operand::Zero());

    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey, ne);
  }

  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);
}

void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : argc
  //  -- r1 : constructor
  //  -- sp[0] : return address
  //  -- sp[4] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    // This test will catch both a NULL and a Smi.
    __ tst(r3, Operand(kSmiTagMask));
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(r3, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the map's "bit field 2" into r3. We only need the first byte,
  // but the following bit field extraction takes care of that anyway.
  __ ldr(r3, FieldMemOperand(r3, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(r3);

  if (FLAG_debug_code) {
    Label done;
    __ cmp(r3, Operand(FAST_ELEMENTS));
    __ b(eq, &done);
    __ cmp(r3, Operand(FAST_HOLEY_ELEMENTS));
    __ Assert(eq,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmp(r3, Operand(FAST_ELEMENTS));
  __ b(eq, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}

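// Fast path for object allocation with `new`: validates that the new target is
// a JSFunction whose initial map was created for the target constructor,
// allocates the JSObject inline (performing in-object slack tracking while the
// map's construction counter is still running), and falls back to the
// %NewObject runtime function for any case it cannot handle.  (Summary of the
// code below.)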
void FastNewObjectStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r1 : target
  //  -- r3 : new target
  //  -- cp : context
  //  -- lr : return address
  // -----------------------------------
  __ AssertFunction(r1);
  __ AssertReceiver(r3);

  // Verify that the new target is a JSFunction.
  Label new_object;
  __ CompareObjectType(r3, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &new_object);

  // Load the initial map and verify that it's in fact a map.
  __ ldr(r2, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
  __ JumpIfSmi(r2, &new_object);
  __ CompareObjectType(r2, r0, r0, MAP_TYPE);
  __ b(ne, &new_object);

  // Fall back to runtime if the target differs from the new target's
  // initial map constructor.
  __ ldr(r0, FieldMemOperand(r2, Map::kConstructorOrBackPointerOffset));
  __ cmp(r0, r1);
  __ b(ne, &new_object);

  // Allocate the JSObject on the heap.
  Label allocate, done_allocate;
  __ ldrb(r4, FieldMemOperand(r2, Map::kInstanceSizeOffset));
  __ Allocate(r4, r0, r5, r6, &allocate, SIZE_IN_WORDS);
  __ bind(&done_allocate);

  // Initialize the JSObject fields.
  __ str(r2, FieldMemOperand(r0, JSObject::kMapOffset));
  __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
  __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
  STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
  __ add(r1, r0, Operand(JSObject::kHeaderSize - kHeapObjectTag));

  // ----------- S t a t e -------------
  //  -- r0 : result (tagged)
  //  -- r1 : result fields (untagged)
  //  -- r5 : result end (untagged)
  //  -- r2 : initial map
  //  -- cp : context
  //  -- lr : return address
  // -----------------------------------

  // Perform in-object slack tracking if requested.
  Label slack_tracking;
  STATIC_ASSERT(Map::kNoSlackTracking == 0);
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ ldr(r3, FieldMemOperand(r2, Map::kBitField3Offset));
  __ tst(r3, Operand(Map::ConstructionCounter::kMask));
  __ b(ne, &slack_tracking);
  {
    // Initialize all in-object fields with undefined.
    __ InitializeFieldsWithFiller(r1, r5, r6);
    __ Ret();
  }
  __ bind(&slack_tracking);
  {
    // Decrease generous allocation count.
    STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
    __ sub(r3, r3, Operand(1 << Map::ConstructionCounter::kShift));
    __ str(r3, FieldMemOperand(r2, Map::kBitField3Offset));

    // Initialize the in-object fields with undefined.
    __ ldrb(r4, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
    __ sub(r4, r5, Operand(r4, LSL, kPointerSizeLog2));
    __ InitializeFieldsWithFiller(r1, r4, r6);

    // Initialize the remaining (reserved) fields with one pointer filler map.
    __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
    __ InitializeFieldsWithFiller(r1, r5, r6);

    // Check if we can finalize the instance size.
    STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
    __ tst(r3, Operand(Map::ConstructionCounter::kMask));
    __ Ret(ne);

    // Finalize the instance size.
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r0, r2);
      __ CallRuntime(Runtime::kFinalizeInstanceSize);
      __ Pop(r0);
    }
    __ Ret();
  }

  // Fall back to %AllocateInNewSpace.
  __ bind(&allocate);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagSize == 1);
    __ mov(r4, Operand(r4, LSL, kPointerSizeLog2 + 1));
    __ Push(r2, r4);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ Pop(r2);
  }
  __ ldrb(r5, FieldMemOperand(r2, Map::kInstanceSizeOffset));
  __ add(r5, r0, Operand(r5, LSL, kPointerSizeLog2));
  STATIC_ASSERT(kHeapObjectTag == 1);
  __ sub(r5, r5, Operand(kHeapObjectTag));
  __ b(&done_allocate);

  // Fall back to %NewObject.
  __ bind(&new_object);
  __ Push(r1, r3);
  __ TailCallRuntime(Runtime::kNewObject);
}

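// Builds the rest parameter array for the current function: if the caller went
// through an arguments adaptor frame and passed more arguments than the
// function's formal parameter count, the extra arguments are copied into a
// freshly allocated JSArray; otherwise an empty array is returned.  Very large
// allocations are punted to %NewRestParameter.  (Summary of the code below.)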
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r1 : function
  //  -- cp : context
  //  -- fp : frame pointer
  //  -- lr : return address
  // -----------------------------------
  __ AssertFunction(r1);

  // Make r2 point to the JavaScript frame.
  __ mov(r2, fp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    Label ok;
    __ ldr(ip, MemOperand(r2, StandardFrameConstants::kFunctionOffset));
    __ cmp(ip, r1);
    __ b(eq, &ok);
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // Check if we have rest parameters (only possible if we have an
  // arguments adaptor frame below the function frame).
  Label no_rest_parameters;
  __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
  __ ldr(ip, MemOperand(r2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ cmp(ip, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(ne, &no_rest_parameters);

  // Check if the arguments adaptor frame contains more arguments than
  // specified by the function's internal formal parameter count.
  Label rest_parameters;
  __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r3,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ sub(r0, r0, r3, SetCC);
  __ b(gt, &rest_parameters);

  // Return an empty rest parameter array.
  __ bind(&no_rest_parameters);
  {
    // ----------- S t a t e -------------
    //  -- cp : context
    //  -- lr : return address
    // -----------------------------------

    // Allocate an empty rest parameter array.
    Label allocate, done_allocate;
    __ Allocate(JSArray::kSize, r0, r1, r2, &allocate, NO_ALLOCATION_FLAGS);
    __ bind(&done_allocate);

    // Setup the rest parameter array in r0.
    __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, r1);
    __ str(r1, FieldMemOperand(r0, JSArray::kMapOffset));
    __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
    __ str(r1, FieldMemOperand(r0, JSArray::kPropertiesOffset));
    __ str(r1, FieldMemOperand(r0, JSArray::kElementsOffset));
    __ mov(r1, Operand(0));
    __ str(r1, FieldMemOperand(r0, JSArray::kLengthOffset));
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ Ret();

    // Fall back to %AllocateInNewSpace.
    __ bind(&allocate);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(Smi::FromInt(JSArray::kSize));
      __ CallRuntime(Runtime::kAllocateInNewSpace);
    }
    __ jmp(&done_allocate);
  }

  __ bind(&rest_parameters);
  {
    // Compute the pointer to the first rest parameter (skipping the receiver).
    __ add(r2, r2, Operand(r0, LSL, kPointerSizeLog2 - 1));
    __ add(r2, r2,
           Operand(StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));

    // ----------- S t a t e -------------
    //  -- cp : context
    //  -- r0 : number of rest parameters (tagged)
    //  -- r1 : function
    //  -- r2 : pointer to first rest parameters
    //  -- lr : return address
    // -----------------------------------

    // Allocate space for the rest parameter array plus the backing store.
    Label allocate, done_allocate;
    __ mov(r6, Operand(JSArray::kSize + FixedArray::kHeaderSize));
    __ add(r6, r6, Operand(r0, LSL, kPointerSizeLog2 - 1));
    __ Allocate(r6, r3, r4, r5, &allocate, NO_ALLOCATION_FLAGS);
    __ bind(&done_allocate);

    // Setup the elements array in r3.
    __ LoadRoot(r1, Heap::kFixedArrayMapRootIndex);
    __ str(r1, FieldMemOperand(r3, FixedArray::kMapOffset));
    __ str(r0, FieldMemOperand(r3, FixedArray::kLengthOffset));
    __ add(r4, r3, Operand(FixedArray::kHeaderSize));
    {
      Label loop, done_loop;
      __ add(r1, r4, Operand(r0, LSL, kPointerSizeLog2 - 1));
      __ bind(&loop);
      __ cmp(r4, r1);
      __ b(eq, &done_loop);
      __ ldr(ip, MemOperand(r2, 1 * kPointerSize, NegPostIndex));
      __ str(ip, FieldMemOperand(r4, 0 * kPointerSize));
      __ add(r4, r4, Operand(1 * kPointerSize));
      __ b(&loop);
      __ bind(&done_loop);
    }

    // Setup the rest parameter array in r4.
    __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, r1);
    __ str(r1, FieldMemOperand(r4, JSArray::kMapOffset));
    __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
    __ str(r1, FieldMemOperand(r4, JSArray::kPropertiesOffset));
    __ str(r3, FieldMemOperand(r4, JSArray::kElementsOffset));
    __ str(r0, FieldMemOperand(r4, JSArray::kLengthOffset));
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ mov(r0, r4);
    __ Ret();

    // Fall back to %AllocateInNewSpace (if not too big).
    Label too_big_for_new_space;
    __ bind(&allocate);
    __ cmp(r6, Operand(Page::kMaxRegularHeapObjectSize));
    __ b(gt, &too_big_for_new_space);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r6);
      __ Push(r0, r2, r6);
      __ CallRuntime(Runtime::kAllocateInNewSpace);
      __ mov(r3, r0);
      __ Pop(r0, r2);
    }
    __ jmp(&done_allocate);

    // Fall back to %NewRestParameter.
    __ bind(&too_big_for_new_space);
    __ push(r1);
    __ TailCallRuntime(Runtime::kNewRestParameter);
  }
}

4573void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
4574 // ----------- S t a t e -------------
4575 // -- r1 : function
4576 // -- cp : context
4577 // -- fp : frame pointer
4578 // -- lr : return address
4579 // -----------------------------------
4580 __ AssertFunction(r1);
4581
Ben Murdochc5610432016-08-08 18:44:38 +01004582 // Make r9 point to the JavaScript frame.
4583 __ mov(r9, fp);
4584 if (skip_stub_frame()) {
4585 // For Ignition we need to skip the handler/stub frame to reach the
4586 // JavaScript frame for the function.
4587 __ ldr(r9, MemOperand(r9, StandardFrameConstants::kCallerFPOffset));
4588 }
4589 if (FLAG_debug_code) {
4590 Label ok;
4591 __ ldr(ip, MemOperand(r9, StandardFrameConstants::kFunctionOffset));
4592 __ cmp(ip, r1);
4593 __ b(eq, &ok);
4594 __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
4595 __ bind(&ok);
4596 }
4597
Ben Murdoch097c5b22016-05-18 11:27:45 +01004598 // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
4599 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
4600 __ ldr(r2,
4601 FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
Ben Murdochc5610432016-08-08 18:44:38 +01004602 __ add(r3, r9, Operand(r2, LSL, kPointerSizeLog2 - 1));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004603 __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
4604
4605 // r1 : function
4606 // r2 : number of parameters (tagged)
4607 // r3 : parameters pointer
Ben Murdochc5610432016-08-08 18:44:38 +01004608 // r9 : JavaScript frame pointer
Ben Murdoch097c5b22016-05-18 11:27:45 +01004609 // Registers used over whole function:
4610 // r5 : arguments count (tagged)
4611 // r6 : mapped parameter count (tagged)
4612
4613 // Check if the calling frame is an arguments adaptor frame.
4614 Label adaptor_frame, try_allocate, runtime;
Ben Murdochc5610432016-08-08 18:44:38 +01004615 __ ldr(r4, MemOperand(r9, StandardFrameConstants::kCallerFPOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01004616 __ ldr(r0, MemOperand(r4, CommonFrameConstants::kContextOrFrameTypeOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004617 __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4618 __ b(eq, &adaptor_frame);
4619
4620 // No adaptor, parameter count = argument count.
4621 __ mov(r5, r2);
4622 __ mov(r6, r2);
4623 __ b(&try_allocate);
4624
4625 // We have an adaptor frame. Patch the parameters pointer.
4626 __ bind(&adaptor_frame);
4627 __ ldr(r5, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
4628 __ add(r4, r4, Operand(r5, LSL, 1));
4629 __ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset));
4630
4631 // r5 = argument count (tagged)
4632 // r6 = parameter count (tagged)
4633 // Compute the mapped parameter count = min(r6, r5) in r6.
4634 __ mov(r6, r2);
4635 __ cmp(r6, Operand(r5));
4636 __ mov(r6, Operand(r5), LeaveCC, gt);
4637
4638 __ bind(&try_allocate);
4639
4640 // Compute the sizes of backing store, parameter map, and arguments object.
4641 // 1. Parameter map, has 2 extra words containing context and backing store.
4642 const int kParameterMapHeaderSize =
4643 FixedArray::kHeaderSize + 2 * kPointerSize;
4644 // If there are no mapped parameters, we do not need the parameter_map.
4645 __ cmp(r6, Operand(Smi::FromInt(0)));
4646 __ mov(r9, Operand::Zero(), LeaveCC, eq);
4647 __ mov(r9, Operand(r6, LSL, 1), LeaveCC, ne);
4648 __ add(r9, r9, Operand(kParameterMapHeaderSize), LeaveCC, ne);
4649
4650 // 2. Backing store.
4651 __ add(r9, r9, Operand(r5, LSL, 1));
4652 __ add(r9, r9, Operand(FixedArray::kHeaderSize));
4653
4654 // 3. Arguments object.
4655 __ add(r9, r9, Operand(JSSloppyArgumentsObject::kSize));
4656
4657 // Do the allocation of all three objects in one go.
Ben Murdochc5610432016-08-08 18:44:38 +01004658 __ Allocate(r9, r0, r9, r4, &runtime, NO_ALLOCATION_FLAGS);
Ben Murdoch097c5b22016-05-18 11:27:45 +01004659
4660 // r0 = address of new object(s) (tagged)
4661 // r2 = argument count (smi-tagged)
4662 // Get the arguments boilerplate from the current native context into r4.
4663 const int kNormalOffset =
4664 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
4665 const int kAliasedOffset =
4666 Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
4667
4668 __ ldr(r4, NativeContextMemOperand());
4669 __ cmp(r6, Operand::Zero());
4670 __ ldr(r4, MemOperand(r4, kNormalOffset), eq);
4671 __ ldr(r4, MemOperand(r4, kAliasedOffset), ne);
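  // Again using conditional execution: pick the plain sloppy arguments map
  // when there are no mapped parameters, otherwise the fast aliased
  // arguments map.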
4672
4673 // r0 = address of new object (tagged)
4674 // r2 = argument count (smi-tagged)
4675 // r4 = address of arguments map (tagged)
4676 // r6 = mapped parameter count (tagged)
4677 __ str(r4, FieldMemOperand(r0, JSObject::kMapOffset));
4678 __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
4679 __ str(r9, FieldMemOperand(r0, JSObject::kPropertiesOffset));
4680 __ str(r9, FieldMemOperand(r0, JSObject::kElementsOffset));
4681
4682 // Set up the callee in-object property.
4683 __ AssertNotSmi(r1);
4684 __ str(r1, FieldMemOperand(r0, JSSloppyArgumentsObject::kCalleeOffset));
4685
4686 // Use the length (smi tagged) and set that as an in-object property too.
4687 __ AssertSmi(r5);
4688 __ str(r5, FieldMemOperand(r0, JSSloppyArgumentsObject::kLengthOffset));
4689
4690 // Set up the elements pointer in the allocated arguments object.
4691 // If we allocated a parameter map, r4 will point there, otherwise
4692 // it will point to the backing store.
4693 __ add(r4, r0, Operand(JSSloppyArgumentsObject::kSize));
4694 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
4695
4696 // r0 = address of new object (tagged)
4697 // r2 = argument count (tagged)
4698 // r4 = address of parameter map or backing store (tagged)
4699 // r6 = mapped parameter count (tagged)
4700 // Initialize parameter map. If there are no mapped arguments, we're done.
4701 Label skip_parameter_map;
4702 __ cmp(r6, Operand(Smi::FromInt(0)));
4703 // Move backing store address to r1, because it is
4704 // expected there when filling in the unmapped arguments.
4705 __ mov(r1, r4, LeaveCC, eq);
4706 __ b(eq, &skip_parameter_map);
4707
4708 __ LoadRoot(r5, Heap::kSloppyArgumentsElementsMapRootIndex);
4709 __ str(r5, FieldMemOperand(r4, FixedArray::kMapOffset));
4710 __ add(r5, r6, Operand(Smi::FromInt(2)));
4711 __ str(r5, FieldMemOperand(r4, FixedArray::kLengthOffset));
4712 __ str(cp, FieldMemOperand(r4, FixedArray::kHeaderSize + 0 * kPointerSize));
4713 __ add(r5, r4, Operand(r6, LSL, 1));
4714 __ add(r5, r5, Operand(kParameterMapHeaderSize));
4715 __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + 1 * kPointerSize));
4716
4717 // Copy the parameter slots and the holes in the arguments.
4718 // We need to fill in mapped_parameter_count slots. They index the context,
4719 // where parameters are stored in reverse order, at
4720  // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1.
4721  // The mapped parameters thus need to get the indices
4722 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
4723 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
4724 // We loop from right to left.
4725 Label parameters_loop, parameters_test;
4726 __ mov(r5, r6);
4727 __ add(r9, r2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
4728 __ sub(r9, r9, Operand(r6));
4729 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
4730 __ add(r1, r4, Operand(r5, LSL, 1));
4731 __ add(r1, r1, Operand(kParameterMapHeaderSize));
4732
4733 // r1 = address of backing store (tagged)
4734 // r4 = address of parameter map (tagged), which is also the address of new
4735 // object + Heap::kSloppyArgumentsObjectSize (tagged)
4736  // r0 = temporary scratch (used, among other things, for address calculation)
4737 // r5 = loop variable (tagged)
4738 // ip = the hole value
4739 __ jmp(&parameters_test);
4740
4741 __ bind(&parameters_loop);
4742 __ sub(r5, r5, Operand(Smi::FromInt(1)));
4743 __ mov(r0, Operand(r5, LSL, 1));
4744 __ add(r0, r0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
4745 __ str(r9, MemOperand(r4, r0));
4746 __ sub(r0, r0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
4747 __ str(ip, MemOperand(r1, r0));
4748 __ add(r9, r9, Operand(Smi::FromInt(1)));
4749 __ bind(&parameters_test);
4750 __ cmp(r5, Operand(Smi::FromInt(0)));
4751 __ b(ne, &parameters_loop);
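  // Each iteration stored a context slot index (as a Smi) into the parameter
  // map and the hole into the matching backing store slot; the actual value
  // of a mapped parameter lives in the context, not in the backing store.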
4752
4753 // Restore r0 = new object (tagged) and r5 = argument count (tagged).
4754 __ sub(r0, r4, Operand(JSSloppyArgumentsObject::kSize));
4755 __ ldr(r5, FieldMemOperand(r0, JSSloppyArgumentsObject::kLengthOffset));
4756
4757 __ bind(&skip_parameter_map);
4758 // r0 = address of new object (tagged)
4759 // r1 = address of backing store (tagged)
4760 // r5 = argument count (tagged)
4761 // r6 = mapped parameter count (tagged)
4762 // r9 = scratch
4763 // Copy arguments header and remaining slots (if there are any).
4764 __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex);
4765 __ str(r9, FieldMemOperand(r1, FixedArray::kMapOffset));
4766 __ str(r5, FieldMemOperand(r1, FixedArray::kLengthOffset));
4767
4768 Label arguments_loop, arguments_test;
4769 __ sub(r3, r3, Operand(r6, LSL, 1));
4770 __ jmp(&arguments_test);
4771
4772 __ bind(&arguments_loop);
4773 __ sub(r3, r3, Operand(kPointerSize));
4774 __ ldr(r4, MemOperand(r3, 0));
4775 __ add(r9, r1, Operand(r6, LSL, 1));
4776 __ str(r4, FieldMemOperand(r9, FixedArray::kHeaderSize));
4777 __ add(r6, r6, Operand(Smi::FromInt(1)));
4778
4779 __ bind(&arguments_test);
4780 __ cmp(r6, Operand(r5));
4781 __ b(lt, &arguments_loop);
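  // All unmapped arguments (indices >= mapped parameter count) have now been
  // copied from the caller's stack into the backing store.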
4782
4783 // Return.
4784 __ Ret();
4785
4786 // Do the runtime call to allocate the arguments object.
4787 // r0 = address of new object (tagged)
4788 // r5 = argument count (tagged)
4789 __ bind(&runtime);
4790 __ Push(r1, r3, r5);
4791 __ TailCallRuntime(Runtime::kNewSloppyArguments);
4792}
4793
4794
4795void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
4796 // ----------- S t a t e -------------
4797 // -- r1 : function
4798 // -- cp : context
4799 // -- fp : frame pointer
4800 // -- lr : return address
4801 // -----------------------------------
4802 __ AssertFunction(r1);
4803
4804  // Make r2 point to the JavaScript frame.
4805 __ mov(r2, fp);
4806 if (skip_stub_frame()) {
4807 // For Ignition we need to skip the handler/stub frame to reach the
4808 // JavaScript frame for the function.
4809  __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
4810  }
4811 if (FLAG_debug_code) {
4812 Label ok;
4813  __ ldr(ip, MemOperand(r2, StandardFrameConstants::kFunctionOffset));
4814  __ cmp(ip, r1);
4815  __ b(eq, &ok);
4816 __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
4817 __ bind(&ok);
4818  }
4819
4820 // Check if we have an arguments adaptor frame below the function frame.
4821 Label arguments_adaptor, arguments_done;
4822 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
4823  __ ldr(ip, MemOperand(r3, CommonFrameConstants::kContextOrFrameTypeOffset));
4824  __ cmp(ip, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4825 __ b(eq, &arguments_adaptor);
4826 {
4827  __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
4828  __ ldr(r0, FieldMemOperand(
4829  r4, SharedFunctionInfo::kFormalParameterCountOffset));
4830  __ add(r2, r2, Operand(r0, LSL, kPointerSizeLog2 - 1));
4831 __ add(r2, r2,
4832 Operand(StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
4833 }
4834 __ b(&arguments_done);
4835 __ bind(&arguments_adaptor);
4836 {
4837 __ ldr(r0, MemOperand(r3, ArgumentsAdaptorFrameConstants::kLengthOffset));
4838 __ add(r2, r3, Operand(r0, LSL, kPointerSizeLog2 - 1));
4839 __ add(r2, r2,
4840 Operand(StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
4841 }
4842 __ bind(&arguments_done);
4843
4844 // ----------- S t a t e -------------
4845 // -- cp : context
4846 // -- r0 : number of rest parameters (tagged)
4847  //  -- r1 : function
4848  //  -- r2 : pointer to first rest parameters
4849 // -- lr : return address
4850 // -----------------------------------
4851
4852 // Allocate space for the strict arguments object plus the backing store.
4853 Label allocate, done_allocate;
4854  __ mov(r6, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
4855 __ add(r6, r6, Operand(r0, LSL, kPointerSizeLog2 - 1));
4856 __ Allocate(r6, r3, r4, r5, &allocate, NO_ALLOCATION_FLAGS);
4857  __ bind(&done_allocate);
4858
4859  // Set up the elements array in r3.
4860 __ LoadRoot(r1, Heap::kFixedArrayMapRootIndex);
4861 __ str(r1, FieldMemOperand(r3, FixedArray::kMapOffset));
4862 __ str(r0, FieldMemOperand(r3, FixedArray::kLengthOffset));
4863 __ add(r4, r3, Operand(FixedArray::kHeaderSize));
4864 {
4865 Label loop, done_loop;
4866 __ add(r1, r4, Operand(r0, LSL, kPointerSizeLog2 - 1));
4867 __ bind(&loop);
4868 __ cmp(r4, r1);
4869 __ b(eq, &done_loop);
4870 __ ldr(ip, MemOperand(r2, 1 * kPointerSize, NegPostIndex));
4871 __ str(ip, FieldMemOperand(r4, 0 * kPointerSize));
4872 __ add(r4, r4, Operand(1 * kPointerSize));
4873 __ b(&loop);
4874 __ bind(&done_loop);
4875 }
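  // The loop above reads the arguments from the stack highest-address-first
  // (NegPostIndex walks r2 downwards) while filling the FixedArray in
  // ascending index order.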
4876
4877  // Set up the strict arguments object in r4.
4878 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, r1);
4879 __ str(r1, FieldMemOperand(r4, JSStrictArgumentsObject::kMapOffset));
4880 __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
4881 __ str(r1, FieldMemOperand(r4, JSStrictArgumentsObject::kPropertiesOffset));
4882 __ str(r3, FieldMemOperand(r4, JSStrictArgumentsObject::kElementsOffset));
4883 __ str(r0, FieldMemOperand(r4, JSStrictArgumentsObject::kLengthOffset));
4884 STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
4885 __ mov(r0, r4);
4886 __ Ret();
4887
4888  // Fall back to %AllocateInNewSpace (if not too big).
4889 Label too_big_for_new_space;
4890  __ bind(&allocate);
4891  __ cmp(r6, Operand(Page::kMaxRegularHeapObjectSize));
4892 __ b(gt, &too_big_for_new_space);
4893  {
4894 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
4895  __ SmiTag(r6);
4896 __ Push(r0, r2, r6);
4897  __ CallRuntime(Runtime::kAllocateInNewSpace);
4898 __ mov(r3, r0);
4899 __ Pop(r0, r2);
4900 }
4901 __ b(&done_allocate);
4902
4903  // Fall back to %NewStrictArguments.
4904 __ bind(&too_big_for_new_space);
4905 __ push(r1);
4906 __ TailCallRuntime(Runtime::kNewStrictArguments);
4907}
4908
4909
4910void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
4911 Register value = r0;
4912 Register slot = r2;
4913
4914 Register cell = r1;
4915 Register cell_details = r4;
4916 Register cell_value = r5;
4917 Register cell_value_map = r6;
4918 Register scratch = r9;
4919
4920 Register context = cp;
4921 Register context_temp = cell;
4922
4923 Label fast_heapobject_case, fast_smi_case, slow_case;
4924
4925 if (FLAG_debug_code) {
4926 __ CompareRoot(value, Heap::kTheHoleValueRootIndex);
4927 __ Check(ne, kUnexpectedValue);
4928 }
4929
4930 // Go up the context chain to the script context.
4931 for (int i = 0; i < depth(); i++) {
4932 __ ldr(context_temp, ContextMemOperand(context, Context::PREVIOUS_INDEX));
4933 context = context_temp;
4934 }
4935
4936 // Load the PropertyCell at the specified slot.
4937 __ add(cell, context, Operand(slot, LSL, kPointerSizeLog2));
4938 __ ldr(cell, ContextMemOperand(cell));
4939
4940 // Load PropertyDetails for the cell (actually only the cell_type and kind).
4941 __ ldr(cell_details, FieldMemOperand(cell, PropertyCell::kDetailsOffset));
4942 __ SmiUntag(cell_details);
4943 __ and_(cell_details, cell_details,
4944 Operand(PropertyDetails::PropertyCellTypeField::kMask |
4945 PropertyDetails::KindField::kMask |
4946 PropertyDetails::kAttributesReadOnlyMask));
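  // Only the cell type, the property kind and the read-only attribute bit
  // are relevant for the checks below; all other detail bits are masked out.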
4947
4948 // Check if PropertyCell holds mutable data.
4949 Label not_mutable_data;
4950 __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
4951 PropertyCellType::kMutable) |
4952 PropertyDetails::KindField::encode(kData)));
4953 __ b(ne, &not_mutable_data);
4954 __ JumpIfSmi(value, &fast_smi_case);
4955
4956 __ bind(&fast_heapobject_case);
4957 __ str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
4958 // RecordWriteField clobbers the value register, so we copy it before the
4959 // call.
4960 __ mov(r4, Operand(value));
4961 __ RecordWriteField(cell, PropertyCell::kValueOffset, r4, scratch,
4962 kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
4963 OMIT_SMI_CHECK);
4964 __ Ret();
4965
4966 __ bind(&not_mutable_data);
4967 // Check if PropertyCell value matches the new value (relevant for Constant,
4968 // ConstantType and Undefined cells).
4969 Label not_same_value;
4970 __ ldr(cell_value, FieldMemOperand(cell, PropertyCell::kValueOffset));
4971 __ cmp(cell_value, value);
4972 __ b(ne, &not_same_value);
4973
4974 // Make sure the PropertyCell is not marked READ_ONLY.
4975 __ tst(cell_details, Operand(PropertyDetails::kAttributesReadOnlyMask));
4976 __ b(ne, &slow_case);
4977
4978 if (FLAG_debug_code) {
4979 Label done;
4980 // This can only be true for Constant, ConstantType and Undefined cells,
4981 // because we never store the_hole via this stub.
4982 __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
4983 PropertyCellType::kConstant) |
4984 PropertyDetails::KindField::encode(kData)));
4985 __ b(eq, &done);
4986 __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
4987 PropertyCellType::kConstantType) |
4988 PropertyDetails::KindField::encode(kData)));
4989 __ b(eq, &done);
4990 __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
4991 PropertyCellType::kUndefined) |
4992 PropertyDetails::KindField::encode(kData)));
4993 __ Check(eq, kUnexpectedValue);
4994 __ bind(&done);
4995 }
4996 __ Ret();
4997 __ bind(&not_same_value);
4998
4999 // Check if PropertyCell contains data with constant type (and is not
5000 // READ_ONLY).
5001 __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
5002 PropertyCellType::kConstantType) |
5003 PropertyDetails::KindField::encode(kData)));
5004 __ b(ne, &slow_case);
5005
5006  // Now either both the old and the new value must be Smis, or both must
5007  // be heap objects with the same map.
5008 Label value_is_heap_object;
5009 __ JumpIfNotSmi(value, &value_is_heap_object);
5010 __ JumpIfNotSmi(cell_value, &slow_case);
5011 // Old and new values are smis, no need for a write barrier here.
5012 __ bind(&fast_smi_case);
5013 __ str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
5014 __ Ret();
5015
5016 __ bind(&value_is_heap_object);
5017 __ JumpIfSmi(cell_value, &slow_case);
5018
5019 __ ldr(cell_value_map, FieldMemOperand(cell_value, HeapObject::kMapOffset));
5020 __ ldr(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
5021 __ cmp(cell_value_map, scratch);
5022 __ b(eq, &fast_heapobject_case);
5023
5024  // Fall back to the runtime.
5025 __ bind(&slow_case);
5026 __ SmiTag(slot);
5027 __ Push(slot, value);
5028 __ TailCallRuntime(is_strict(language_mode())
5029 ? Runtime::kStoreGlobalViaContext_Strict
5030 : Runtime::kStoreGlobalViaContext_Sloppy);
5031}
5032
5033
5034static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
5035 return ref0.address() - ref1.address();
5036}
5037
5038
5039// Calls an API function. Allocates a HandleScope, extracts the returned
5040// value from the handle, and propagates exceptions. Restores the context.
5041// stack_space is the space to be unwound on exit (it includes the call JS
5042// arguments space and the additional space allocated for the fast call).
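// Roughly, the handle scope bookkeeping generated below corresponds to this
// sketch (the field names are illustrative, not the actual HandleScopeData
// members):
//
//   prev_next = data->next;          // saved in r4
//   prev_limit = data->limit;        // saved in r5
//   data->level++;                   // new level kept in r6
//   result = InvokeApiFunction();
//   data->next = prev_next;
//   data->level--;
//   if (data->limit != prev_limit) { // extensions were allocated
//     data->limit = prev_limit;
//     DeleteHandleScopeExtensions(isolate);
//   }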
5043static void CallApiFunctionAndReturn(MacroAssembler* masm,
5044 Register function_address,
5045 ExternalReference thunk_ref,
5046 int stack_space,
5047 MemOperand* stack_space_operand,
5048 MemOperand return_value_operand,
5049 MemOperand* context_restore_operand) {
5050 Isolate* isolate = masm->isolate();
5051 ExternalReference next_address =
5052 ExternalReference::handle_scope_next_address(isolate);
5053 const int kNextOffset = 0;
5054 const int kLimitOffset = AddressOffset(
5055 ExternalReference::handle_scope_limit_address(isolate), next_address);
5056 const int kLevelOffset = AddressOffset(
5057 ExternalReference::handle_scope_level_address(isolate), next_address);
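  // The limit and level fields are addressed as fixed offsets from the
  // handle scope 'next' address, so a single base register can reach all
  // three HandleScopeData fields.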
5058
5059 DCHECK(function_address.is(r1) || function_address.is(r2));
5060
5061 Label profiler_disabled;
5062 Label end_profiler_check;
5063 __ mov(r9, Operand(ExternalReference::is_profiling_address(isolate)));
5064 __ ldrb(r9, MemOperand(r9, 0));
5065 __ cmp(r9, Operand(0));
5066 __ b(eq, &profiler_disabled);
5067
5068 // Additional parameter is the address of the actual callback.
5069 __ mov(r3, Operand(thunk_ref));
5070 __ jmp(&end_profiler_check);
5071
5072 __ bind(&profiler_disabled);
5073 __ Move(r3, function_address);
5074 __ bind(&end_profiler_check);
5075
5076 // Allocate HandleScope in callee-save registers.
5077 __ mov(r9, Operand(next_address));
5078 __ ldr(r4, MemOperand(r9, kNextOffset));
5079 __ ldr(r5, MemOperand(r9, kLimitOffset));
5080 __ ldr(r6, MemOperand(r9, kLevelOffset));
5081 __ add(r6, r6, Operand(1));
5082 __ str(r6, MemOperand(r9, kLevelOffset));
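  // r4, r5 and r6 (saved next, saved limit and the incremented level) are
  // callee-saved on ARM, so they survive the C call made below.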
5083
5084 if (FLAG_log_timer_events) {
5085 FrameScope frame(masm, StackFrame::MANUAL);
5086 __ PushSafepointRegisters();
5087 __ PrepareCallCFunction(1, r0);
5088 __ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
5089 __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
5090 1);
5091 __ PopSafepointRegisters();
5092 }
5093
5094 // Native call returns to the DirectCEntry stub which redirects to the
5095  // return address pushed on the stack (which could have moved after GC).
5096 // DirectCEntry stub itself is generated early and never moves.
5097 DirectCEntryStub stub(isolate);
5098 stub.GenerateCall(masm, r3);
5099
5100 if (FLAG_log_timer_events) {
5101 FrameScope frame(masm, StackFrame::MANUAL);
5102 __ PushSafepointRegisters();
5103 __ PrepareCallCFunction(1, r0);
5104 __ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
5105 __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
5106 1);
5107 __ PopSafepointRegisters();
5108 }
5109
5110 Label promote_scheduled_exception;
5111 Label delete_allocated_handles;
5112 Label leave_exit_frame;
5113 Label return_value_loaded;
5114
5115  // Load the value from ReturnValue.
5116 __ ldr(r0, return_value_operand);
5117 __ bind(&return_value_loaded);
5118 // No more valid handles (the result handle was the last one). Restore
5119 // previous handle scope.
5120 __ str(r4, MemOperand(r9, kNextOffset));
5121 if (__ emit_debug_code()) {
5122 __ ldr(r1, MemOperand(r9, kLevelOffset));
5123 __ cmp(r1, r6);
5124 __ Check(eq, kUnexpectedLevelAfterReturnFromApiCall);
5125 }
5126 __ sub(r6, r6, Operand(1));
5127 __ str(r6, MemOperand(r9, kLevelOffset));
5128 __ ldr(ip, MemOperand(r9, kLimitOffset));
5129 __ cmp(r5, ip);
5130 __ b(ne, &delete_allocated_handles);
5131
5132 // Leave the API exit frame.
5133 __ bind(&leave_exit_frame);
5134 bool restore_context = context_restore_operand != NULL;
5135 if (restore_context) {
5136 __ ldr(cp, *context_restore_operand);
5137 }
5138 // LeaveExitFrame expects unwind space to be in a register.
5139 if (stack_space_operand != NULL) {
5140 __ ldr(r4, *stack_space_operand);
5141 } else {
5142 __ mov(r4, Operand(stack_space));
5143 }
5144 __ LeaveExitFrame(false, r4, !restore_context, stack_space_operand != NULL);
5145
5146 // Check if the function scheduled an exception.
5147 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
5148 __ mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate)));
5149 __ ldr(r5, MemOperand(ip));
5150 __ cmp(r4, r5);
5151 __ b(ne, &promote_scheduled_exception);
5152
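  // No scheduled exception pending: return to the caller.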
5153 __ mov(pc, lr);
5154
5155 // Re-throw by promoting a scheduled exception.
5156 __ bind(&promote_scheduled_exception);
5157 __ TailCallRuntime(Runtime::kPromoteScheduledException);
5158
5159 // HandleScope limit has changed. Delete allocated extensions.
5160 __ bind(&delete_allocated_handles);
5161 __ str(r5, MemOperand(r9, kLimitOffset));
5162 __ mov(r4, r0);
5163 __ PrepareCallCFunction(1, r5);
5164 __ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
5165 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate),
5166 1);
5167 __ mov(r0, r4);
5168 __ jmp(&leave_exit_frame);
5169}
5170
5171void CallApiCallbackStub::Generate(MacroAssembler* masm) {
5172  // ----------- S t a t e -------------
5173 // -- r0 : callee
5174 // -- r4 : call_data
5175 // -- r2 : holder
5176 // -- r1 : api_function_address
5177 // -- cp : context
5178 // --
5179 // -- sp[0] : last argument
5180 // -- ...
5181 // -- sp[(argc - 1)* 4] : first argument
5182 // -- sp[argc * 4] : receiver
5183 // -----------------------------------
5184
5185 Register callee = r0;
5186 Register call_data = r4;
5187 Register holder = r2;
5188 Register api_function_address = r1;
5189 Register context = cp;
5190
5191  typedef FunctionCallbackArguments FCA;
5192
5193 STATIC_ASSERT(FCA::kContextSaveIndex == 6);
5194 STATIC_ASSERT(FCA::kCalleeIndex == 5);
5195 STATIC_ASSERT(FCA::kDataIndex == 4);
5196 STATIC_ASSERT(FCA::kReturnValueOffset == 3);
5197 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
5198 STATIC_ASSERT(FCA::kIsolateIndex == 1);
5199 STATIC_ASSERT(FCA::kHolderIndex == 0);
5200  STATIC_ASSERT(FCA::kNewTargetIndex == 7);
5201 STATIC_ASSERT(FCA::kArgsLength == 8);
5202
5203 // new target
5204 __ PushRoot(Heap::kUndefinedValueRootIndex);
5205
5206 // context save
5207 __ push(context);
5208  if (!is_lazy()) {
5209  // Load the context from the callee.
5210 __ ldr(context, FieldMemOperand(callee, JSFunction::kContextOffset));
5211 }
5212
5213 // callee
5214 __ push(callee);
5215
5216 // call data
5217 __ push(call_data);
5218
5219 Register scratch = call_data;
5220  if (!call_data_undefined()) {
5221  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
5222 }
5223 // return value
5224 __ push(scratch);
5225 // return value default
5226 __ push(scratch);
5227 // isolate
5228  __ mov(scratch, Operand(ExternalReference::isolate_address(masm->isolate())));
5229  __ push(scratch);
5230 // holder
5231 __ push(holder);
5232
5233 // Prepare arguments.
5234 __ mov(scratch, sp);
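  // scratch now points at the lowest of the FCA::kArgsLength values pushed
  // above (the holder, index 0); it becomes
  // FunctionCallbackInfo::implicit_args_ below.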
5235
5236  // Allocate the FunctionCallbackInfo structure in the arguments' space,
5237  // since it's not controlled by the GC.
5238  const int kApiStackSpace = 3;
5239
5240 FrameScope frame_scope(masm, StackFrame::MANUAL);
5241 __ EnterExitFrame(false, kApiStackSpace);
5242
5243 DCHECK(!api_function_address.is(r0) && !scratch.is(r0));
5244 // r0 = FunctionCallbackInfo&
5245  // The arguments area is located just after the return address.
5246 __ add(r0, sp, Operand(1 * kPointerSize));
5247 // FunctionCallbackInfo::implicit_args_
5248 __ str(scratch, MemOperand(r0, 0 * kPointerSize));
5249  // FunctionCallbackInfo::values_
5250 __ add(ip, scratch, Operand((FCA::kArgsLength - 1 + argc()) * kPointerSize));
5251 __ str(ip, MemOperand(r0, 1 * kPointerSize));
5252 // FunctionCallbackInfo::length_ = argc
5253 __ mov(ip, Operand(argc()));
5254 __ str(ip, MemOperand(r0, 2 * kPointerSize));
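  // The three stores above fill in implicit_args_, values_ (pointing at the
  // first JS argument) and length_ of the FunctionCallbackInfo.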
5255
5256  ExternalReference thunk_ref =
5257  ExternalReference::invoke_function_callback(masm->isolate());
5258
5259 AllowExternalCallThatCantCauseGC scope(masm);
5260 MemOperand context_restore_operand(
5261 fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
5262  // Stores return the first JS argument.
5263 int return_value_offset = 0;
5264  if (is_store()) {
5265  return_value_offset = 2 + FCA::kArgsLength;
5266 } else {
5267 return_value_offset = 2 + FCA::kReturnValueOffset;
5268 }
5269 MemOperand return_value_operand(fp, return_value_offset * kPointerSize);
5270  int stack_space = 0;
5271  MemOperand length_operand = MemOperand(sp, 3 * kPointerSize);
5272 MemOperand* stack_space_operand = &length_operand;
5273  stack_space = argc() + FCA::kArgsLength + 1;
5274 stack_space_operand = NULL;
5275
5276  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, stack_space,
5277 stack_space_operand, return_value_operand,
5278 &context_restore_operand);
5279}
5280
5281
5282void CallApiGetterStub::Generate(MacroAssembler* masm) {
5283  // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
5284 // name below the exit frame to make GC aware of them.
5285 STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
5286 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
5287 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
5288 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
5289 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
5290 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
5291 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
5292 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);
5293
5294  Register receiver = ApiGetterDescriptor::ReceiverRegister();
5295 Register holder = ApiGetterDescriptor::HolderRegister();
5296 Register callback = ApiGetterDescriptor::CallbackRegister();
5297 Register scratch = r4;
5298 DCHECK(!AreAliased(receiver, holder, callback, scratch));
5299
5300  Register api_function_address = r2;
5301
5302 __ push(receiver);
5303 // Push data from AccessorInfo.
5304 __ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset));
5305 __ push(scratch);
5306 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
5307 __ Push(scratch, scratch);
5308 __ mov(scratch, Operand(ExternalReference::isolate_address(isolate())));
5309 __ Push(scratch, holder);
5310 __ Push(Smi::FromInt(0)); // should_throw_on_error -> false
5311 __ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));
5312 __ push(scratch);
5313  // v8::PropertyCallbackInfo::args_ array and name handle.
5314 const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
5315
5316 // Load address of v8::PropertyAccessorInfo::args_ array and name handle.
5317 __ mov(r0, sp); // r0 = Handle<Name>
5318 __ add(r1, r0, Operand(1 * kPointerSize)); // r1 = v8::PCI::args_
5319
5320 const int kApiStackSpace = 1;
5321 FrameScope frame_scope(masm, StackFrame::MANUAL);
5322 __ EnterExitFrame(false, kApiStackSpace);
5323
5324  // Create a v8::PropertyCallbackInfo object on the stack and initialize
5325  // its args_ field.
5326  __ str(r1, MemOperand(sp, 1 * kPointerSize));
5327  __ add(r1, sp, Operand(1 * kPointerSize));  // r1 = v8::PropertyCallbackInfo&
5328
5329 ExternalReference thunk_ref =
5330 ExternalReference::invoke_accessor_getter_callback(isolate());
5331
5332  __ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset));
5333 __ ldr(api_function_address,
5334 FieldMemOperand(scratch, Foreign::kForeignAddressOffset));
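  // The getter is stored as a Foreign object wrapping the raw C function
  // address; the two loads above unwrap it into api_function_address.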
5335
5336  // +3 skips the prologue, the return address and the name handle.
5337 MemOperand return_value_operand(
5338 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
5339  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
5340  kStackUnwindSpace, NULL, return_value_operand, NULL);
5341}
5342
5343#undef __
5344
5345}  // namespace internal
5346} // namespace v8
5347
5348#endif // V8_TARGET_ARCH_ARM