// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/code-stubs.h"
#include "src/api-arguments.h"
#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

#include "src/arm/code-stubs-arm.h"

namespace v8 {
namespace internal {


static void InitializeArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(r0, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kInternalArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(r0, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


void ArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
}

void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
  descriptor->Initialize(r0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}

void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}


#define __ ACCESS_MASM(masm)

static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
                                          Condition cond);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* lhs_not_nan,
                                    Label* slow,
                                    bool strict);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs);


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           r0.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ Ret();
}


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Label out_of_range, only_low, negate, done;
  Register input_reg = source();
  Register result_reg = destination();
  DCHECK(is_truncating());

  int double_offset = offset();
  // Account for saved regs if input is sp.
  if (input_reg.is(sp)) double_offset += 3 * kPointerSize;

  Register scratch = GetRegisterThatIsNotOneOf(input_reg, result_reg);
  Register scratch_low =
      GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch);
  Register scratch_high =
      GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch, scratch_low);
  LowDwVfpRegister double_scratch = kScratchDoubleReg;

  __ Push(scratch_high, scratch_low, scratch);

  if (!skip_fastpath()) {
    // Load double input.
    __ vldr(double_scratch, MemOperand(input_reg, double_offset));
    __ vmov(scratch_low, scratch_high, double_scratch);

    // Do fast-path convert from double to int.
    __ vcvt_s32_f64(double_scratch.low(), double_scratch);
    __ vmov(result_reg, double_scratch.low());

    // If result is not saturated (0x7fffffff or 0x80000000), we are done.
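    // Subtracting 1 maps the two saturated values 0x7fffffff and 0x80000000
    // to 0x7ffffffe and 0x7fffffff; every in-range result maps to a value
    // that is signed-less than 0x7ffffffe, so 'lt' below means the
    // conversion did not saturate.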
165 __ sub(scratch, result_reg, Operand(1));
166 __ cmp(scratch, Operand(0x7ffffffe));
167 __ b(lt, &done);
168 } else {
169 // We've already done MacroAssembler::TryFastTruncatedDoubleToILoad, so we
170 // know exponent > 31, so we can skip the vcvt_s32_f64 which will saturate.
171 if (double_offset == 0) {
172 __ ldm(ia, input_reg, scratch_low.bit() | scratch_high.bit());
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100173 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000174 __ ldr(scratch_low, MemOperand(input_reg, double_offset));
175 __ ldr(scratch_high, MemOperand(input_reg, double_offset + kIntSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100176 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100177 }
178
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000179 __ Ubfx(scratch, scratch_high,
180 HeapNumber::kExponentShift, HeapNumber::kExponentBits);
181 // Load scratch with exponent - 1. This is faster than loading
182 // with exponent because Bias + 1 = 1024 which is an *ARM* immediate value.
183 STATIC_ASSERT(HeapNumber::kExponentBias + 1 == 1024);
184 __ sub(scratch, scratch, Operand(HeapNumber::kExponentBias + 1));
185 // If exponent is greater than or equal to 84, the 32 less significant
186 // bits are 0s (2^84 = 1, 52 significant bits, 32 uncoded bits),
187 // the result is 0.
188 // Compare exponent with 84 (compare exponent - 1 with 83).
189 __ cmp(scratch, Operand(83));
190 __ b(ge, &out_of_range);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100191
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000192 // If we reach this code, 31 <= exponent <= 83.
193 // So, we don't have to handle cases where 0 <= exponent <= 20 for
194 // which we would need to shift right the high part of the mantissa.
195 // Scratch contains exponent - 1.
196 // Load scratch with 52 - exponent (load with 51 - (exponent - 1)).
197 __ rsb(scratch, scratch, Operand(51), SetCC);
198 __ b(ls, &only_low);
199 // 21 <= exponent <= 51, shift scratch_low and scratch_high
200 // to generate the result.
201 __ mov(scratch_low, Operand(scratch_low, LSR, scratch));
202 // Scratch contains: 52 - exponent.
203 // We needs: exponent - 20.
204 // So we use: 32 - scratch = 32 - 52 + exponent = exponent - 20.
205 __ rsb(scratch, scratch, Operand(32));
206 __ Ubfx(result_reg, scratch_high,
207 0, HeapNumber::kMantissaBitsInTopWord);
208 // Set the implicit 1 before the mantissa part in scratch_high.
209 __ orr(result_reg, result_reg,
210 Operand(1 << HeapNumber::kMantissaBitsInTopWord));
211 __ orr(result_reg, scratch_low, Operand(result_reg, LSL, scratch));
212 __ b(&negate);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100213
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000214 __ bind(&out_of_range);
215 __ mov(result_reg, Operand::Zero());
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100216 __ b(&done);
217
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000218 __ bind(&only_low);
219 // 52 <= exponent <= 83, shift only scratch_low.
220 // On entry, scratch contains: 52 - exponent.
221 __ rsb(scratch, scratch, Operand::Zero());
222 __ mov(result_reg, Operand(scratch_low, LSL, scratch));
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100223
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000224 __ bind(&negate);
225 // If input was positive, scratch_high ASR 31 equals 0 and
226 // scratch_high LSR 31 equals zero.
227 // New result = (result eor 0) + 0 = result.
228 // If the input was negative, we have to negate the result.
229 // Input_high ASR 31 equals 0xffffffff and scratch_high LSR 31 equals 1.
230 // New result = (result eor 0xffffffff) + 1 = 0 - result.
231 __ eor(result_reg, result_reg, Operand(scratch_high, ASR, 31));
232 __ add(result_reg, result_reg, Operand(scratch_high, LSR, 31));
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100233
234 __ bind(&done);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000235
236 __ Pop(scratch_high, scratch_low, scratch);
237 __ Ret();
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100238}
239
240
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100241// Handle the case where the lhs and rhs are the same object.
242// Equality is almost reflexive (everything but NaN), so this is a test
243// for "identity and not NaN".
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000244static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
Ben Murdoch097c5b22016-05-18 11:27:45 +0100245 Condition cond) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100246 Label not_identical;
247 Label heap_number, return_equal;
248 __ cmp(r0, r1);
249 __ b(ne, &not_identical);
250
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000251 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
252 // so we do the second best thing - test it ourselves.
253 // They are both equal and they are not both Smis so both of them are not
254 // Smis. If it's not a heap number, then return equal.
255 if (cond == lt || cond == gt) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000256 // Call runtime on identical JSObjects.
257 __ CompareObjectType(r0, r4, r4, FIRST_JS_RECEIVER_TYPE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000258 __ b(ge, slow);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000259 // Call runtime on identical symbols since we need to throw a TypeError.
260 __ cmp(r4, Operand(SYMBOL_TYPE));
261 __ b(eq, slow);
262 // Call runtime on identical SIMD values since we must throw a TypeError.
263 __ cmp(r4, Operand(SIMD128_VALUE_TYPE));
264 __ b(eq, slow);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000265 } else {
266 __ CompareObjectType(r0, r4, r4, HEAP_NUMBER_TYPE);
267 __ b(eq, &heap_number);
268 // Comparing JS objects with <=, >= is complicated.
269 if (cond != eq) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000270 __ cmp(r4, Operand(FIRST_JS_RECEIVER_TYPE));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100271 __ b(ge, slow);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000272 // Call runtime on identical symbols since we need to throw a TypeError.
273 __ cmp(r4, Operand(SYMBOL_TYPE));
274 __ b(eq, slow);
275 // Call runtime on identical SIMD values since we must throw a TypeError.
276 __ cmp(r4, Operand(SIMD128_VALUE_TYPE));
277 __ b(eq, slow);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000278 // Normally here we fall through to return_equal, but undefined is
279 // special: (undefined == undefined) == true, but
280 // (undefined <= undefined) == false! See ECMAScript 11.8.5.
281 if (cond == le || cond == ge) {
282 __ cmp(r4, Operand(ODDBALL_TYPE));
283 __ b(ne, &return_equal);
284 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
285 __ cmp(r0, r2);
286 __ b(ne, &return_equal);
287 if (cond == le) {
288 // undefined <= undefined should fail.
289 __ mov(r0, Operand(GREATER));
290 } else {
291 // undefined >= undefined should fail.
292 __ mov(r0, Operand(LESS));
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100293 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000294 __ Ret();
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100295 }
296 }
297 }
298
299 __ bind(&return_equal);
Steve Block1e0659c2011-05-24 12:43:12 +0100300 if (cond == lt) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100301 __ mov(r0, Operand(GREATER)); // Things aren't less than themselves.
Steve Block1e0659c2011-05-24 12:43:12 +0100302 } else if (cond == gt) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100303 __ mov(r0, Operand(LESS)); // Things aren't greater than themselves.
304 } else {
305 __ mov(r0, Operand(EQUAL)); // Things are <=, >=, ==, === themselves.
306 }
307 __ Ret();
308
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000309 // For less and greater we don't have to check for NaN since the result of
310 // x < x is false regardless. For the others here is some code to check
311 // for NaN.
312 if (cond != lt && cond != gt) {
313 __ bind(&heap_number);
314 // It is a heap number, so return non-equal if it's NaN and equal if it's
315 // not NaN.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100316
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000317 // The representation of NaN values has all exponent bits (52..62) set,
318 // and not all mantissa bits (0..51) clear.
319 // Read top bits of double representation (second word of value).
320 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
321 // Test that exponent bits are all set.
322 __ Sbfx(r3, r2, HeapNumber::kExponentShift, HeapNumber::kExponentBits);
323 // NaNs have all-one exponents so they sign extend to -1.
324 __ cmp(r3, Operand(-1));
325 __ b(ne, &return_equal);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100326
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000327 // Shift out flag and all exponent bits, retaining only mantissa.
328 __ mov(r2, Operand(r2, LSL, HeapNumber::kNonMantissaBitsInTopWord));
329 // Or with all low-bits of mantissa.
330 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
331 __ orr(r0, r3, Operand(r2), SetCC);
332 // For equal we already have the right value in r0: Return zero (equal)
333 // if all bits in mantissa are zero (it's an Infinity) and non-zero if
334 // not (it's a NaN). For <= and >= we need to load r0 with the failing
335 // value if it's a NaN.
336 if (cond != eq) {
337 // All-zero means Infinity means equal.
338 __ Ret(eq);
339 if (cond == le) {
340 __ mov(r0, Operand(GREATER)); // NaN <= NaN should fail.
341 } else {
342 __ mov(r0, Operand(LESS)); // NaN >= NaN should fail.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100343 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100344 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000345 __ Ret();
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100346 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000347 // No fall through here.
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100348
349 __ bind(&not_identical);
350}
351
352
353// See comment at call site.
354static void EmitSmiNonsmiComparison(MacroAssembler* masm,
355 Register lhs,
356 Register rhs,
357 Label* lhs_not_nan,
358 Label* slow,
359 bool strict) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000360 DCHECK((lhs.is(r0) && rhs.is(r1)) ||
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100361 (lhs.is(r1) && rhs.is(r0)));
362
363 Label rhs_is_smi;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000364 __ JumpIfSmi(rhs, &rhs_is_smi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100365
366 // Lhs is a Smi. Check whether the rhs is a heap number.
367 __ CompareObjectType(rhs, r4, r4, HEAP_NUMBER_TYPE);
368 if (strict) {
369 // If rhs is not a number and lhs is a Smi then strict equality cannot
370 // succeed. Return non-equal
371 // If rhs is r0 then there is already a non zero value in it.
372 if (!rhs.is(r0)) {
373 __ mov(r0, Operand(NOT_EQUAL), LeaveCC, ne);
374 }
375 __ Ret(ne);
376 } else {
377 // Smi compared non-strictly with a non-Smi non-heap-number. Call
378 // the runtime.
379 __ b(ne, slow);
380 }
381
382 // Lhs is a smi, rhs is a number.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000383 // Convert lhs to a double in d7.
384 __ SmiToDouble(d7, lhs);
385 // Load the double from rhs, tagged HeapNumber r0, to d6.
386 __ vldr(d6, rhs, HeapNumber::kValueOffset - kHeapObjectTag);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100387
388 // We now have both loaded as doubles but we can skip the lhs nan check
389 // since it's a smi.
390 __ jmp(lhs_not_nan);
391
392 __ bind(&rhs_is_smi);
393 // Rhs is a smi. Check whether the non-smi lhs is a heap number.
394 __ CompareObjectType(lhs, r4, r4, HEAP_NUMBER_TYPE);
395 if (strict) {
396 // If lhs is not a number and rhs is a smi then strict equality cannot
397 // succeed. Return non-equal.
398 // If lhs is r0 then there is already a non zero value in it.
399 if (!lhs.is(r0)) {
400 __ mov(r0, Operand(NOT_EQUAL), LeaveCC, ne);
401 }
402 __ Ret(ne);
403 } else {
404 // Smi compared non-strictly with a non-smi non-heap-number. Call
405 // the runtime.
406 __ b(ne, slow);
407 }
408
409 // Rhs is a smi, lhs is a heap number.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000410 // Load the double from lhs, tagged HeapNumber r1, to d7.
411 __ vldr(d7, lhs, HeapNumber::kValueOffset - kHeapObjectTag);
412 // Convert rhs to a double in d6 .
413 __ SmiToDouble(d6, rhs);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100414 // Fall through to both_loaded_as_doubles.
415}
416
417
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100418// See comment at call site.
419static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
420 Register lhs,
421 Register rhs) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000422 DCHECK((lhs.is(r0) && rhs.is(r1)) ||
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100423 (lhs.is(r1) && rhs.is(r0)));
424
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000425 // If either operand is a JS object or an oddball value, then they are
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100426 // not equal since their pointers are different.
427 // There is no test for undetectability in strict equality.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000428 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100429 Label first_non_object;
430 // Get the type of the first operand into r2 and compare it with
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000431 // FIRST_JS_RECEIVER_TYPE.
432 __ CompareObjectType(rhs, r2, r2, FIRST_JS_RECEIVER_TYPE);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100433 __ b(lt, &first_non_object);
434
435 // Return non-zero (r0 is not zero)
436 Label return_not_equal;
437 __ bind(&return_not_equal);
438 __ Ret();
439
440 __ bind(&first_non_object);
441 // Check for oddballs: true, false, null, undefined.
442 __ cmp(r2, Operand(ODDBALL_TYPE));
443 __ b(eq, &return_not_equal);
444
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000445 __ CompareObjectType(lhs, r3, r3, FIRST_JS_RECEIVER_TYPE);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100446 __ b(ge, &return_not_equal);
447
448 // Check for oddballs: true, false, null, undefined.
449 __ cmp(r3, Operand(ODDBALL_TYPE));
450 __ b(eq, &return_not_equal);
451
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000452 // Now that we have the types we might as well check for
453 // internalized-internalized.
454 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
455 __ orr(r2, r2, Operand(r3));
456 __ tst(r2, Operand(kIsNotStringMask | kIsNotInternalizedMask));
457 __ b(eq, &return_not_equal);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100458}
459
460
461// See comment at call site.
462static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
463 Register lhs,
464 Register rhs,
465 Label* both_loaded_as_doubles,
466 Label* not_heap_numbers,
467 Label* slow) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000468 DCHECK((lhs.is(r0) && rhs.is(r1)) ||
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100469 (lhs.is(r1) && rhs.is(r0)));
470
471 __ CompareObjectType(rhs, r3, r2, HEAP_NUMBER_TYPE);
472 __ b(ne, not_heap_numbers);
473 __ ldr(r2, FieldMemOperand(lhs, HeapObject::kMapOffset));
474 __ cmp(r2, r3);
475 __ b(ne, slow); // First was a heap number, second wasn't. Go slow case.
476
477 // Both are heap numbers. Load them up then jump to the code we have
478 // for that.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000479 __ vldr(d6, rhs, HeapNumber::kValueOffset - kHeapObjectTag);
480 __ vldr(d7, lhs, HeapNumber::kValueOffset - kHeapObjectTag);
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100481 __ jmp(both_loaded_as_doubles);
482}
483
484
Ben Murdochda12d292016-06-02 14:46:10 +0100485// Fast negative check for internalized-to-internalized equality or receiver
486// equality. Also handles the undetectable receiver to null/undefined
487// comparison.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000488static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
Ben Murdoch097c5b22016-05-18 11:27:45 +0100489 Register lhs, Register rhs,
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000490 Label* possible_strings,
Ben Murdoch097c5b22016-05-18 11:27:45 +0100491 Label* runtime_call) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000492 DCHECK((lhs.is(r0) && rhs.is(r1)) ||
Kristian Monsen80d68ea2010-09-08 11:05:35 +0100493 (lhs.is(r1) && rhs.is(r0)));
494
495 // r2 is object type of rhs.
Ben Murdochda12d292016-06-02 14:46:10 +0100496 Label object_test, return_equal, return_unequal, undetectable;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000497 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
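  // Both tags are zero, so an internalized string has the kIsNotStringMask
  // and kIsNotInternalizedMask bits clear in its instance type; one tst per
  // mask is enough to rule strings in or out below.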
  __ tst(r2, Operand(kIsNotStringMask));
  __ b(ne, &object_test);
  __ tst(r2, Operand(kIsNotInternalizedMask));
  __ b(ne, possible_strings);
  __ CompareObjectType(lhs, r3, r3, FIRST_NONSTRING_TYPE);
  __ b(ge, runtime_call);
  __ tst(r3, Operand(kIsNotInternalizedMask));
  __ b(ne, possible_strings);

  // Both are internalized. We already checked they weren't the same pointer so
  // they are not equal. Return non-equal by returning the non-zero object
  // pointer in r0.
  __ Ret();

  __ bind(&object_test);
  __ ldr(r2, FieldMemOperand(lhs, HeapObject::kMapOffset));
  __ ldr(r3, FieldMemOperand(rhs, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ ldrb(r5, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsUndetectable));
  __ b(ne, &undetectable);
  __ tst(r5, Operand(1 << Map::kIsUndetectable));
  __ b(ne, &return_unequal);

  __ CompareInstanceType(r2, r2, FIRST_JS_RECEIVER_TYPE);
  __ b(lt, runtime_call);
  __ CompareInstanceType(r3, r3, FIRST_JS_RECEIVER_TYPE);
  __ b(lt, runtime_call);

  __ bind(&return_unequal);
  // Return non-equal by returning the non-zero object pointer in r0.
  __ Ret();

  __ bind(&undetectable);
  __ tst(r5, Operand(1 << Map::kIsUndetectable));
  __ b(eq, &return_unequal);

  // If both sides are JSReceivers, then the result is false according to
  // the HTML specification, which says that only comparisons with null or
  // undefined are affected by special casing for document.all.
  __ CompareInstanceType(r2, r2, ODDBALL_TYPE);
  __ b(eq, &return_equal);
  __ CompareInstanceType(r3, r3, ODDBALL_TYPE);
  __ b(ne, &return_unequal);

  __ bind(&return_equal);
  __ mov(r0, Operand(EQUAL));
  __ Ret();
}


static void CompareICStub_CheckInputType(MacroAssembler* masm, Register input,
                                         Register scratch,
                                         CompareICState::State expected,
                                         Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail,
                DONT_DO_SMI_CHECK);
  }
  // We could be strict about internalized/non-internalized here, but as long
  // as hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


// On entry r1 and r0 are the values to be compared.
// On exit r0 is 0, positive or negative to indicate the result of
// the comparison.
void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Register lhs = r1;
  Register rhs = r0;
  Condition cc = GetCondition();

  Label miss;
  CompareICStub_CheckInputType(masm, lhs, r2, left(), &miss);
  CompareICStub_CheckInputType(masm, rhs, r3, right(), &miss);

  Label slow;  // Call builtin.
  Label not_smis, both_loaded_as_doubles, lhs_not_nan;

  Label not_two_smis, smi_done;
  __ orr(r2, r1, r0);
  __ JumpIfNotSmi(r2, &not_two_smis);
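  // Both operands are smis: an arithmetic shift right by one untags them (the
  // smi tag is the clear low bit), and the difference of the untagged values
  // is negative, zero or positive exactly as the comparison result requires.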
  __ mov(r1, Operand(r1, ASR, 1));
  __ sub(r0, r1, Operand(r0, ASR, 1));
  __ Ret();
  __ bind(&not_two_smis);

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Handle the case where the objects are identical. Either returns the answer
  // or goes to slow. Only falls through if the objects were not identical.
  EmitIdenticalObjectComparison(masm, &slow, cc);

  // If either is a Smi (we know that not both are), then they can only
  // be strictly equal if the other is a HeapNumber.
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
  __ and_(r2, lhs, Operand(rhs));
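  // The smi tag is the clear low bit, so the AND of the two values looks like
  // a smi iff at least one operand is a smi; we only jump to not_smis when
  // both operands are heap objects.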
  __ JumpIfNotSmi(r2, &not_smis);
  // One operand is a smi. EmitSmiNonsmiComparison generates code that can:
  // 1) Return the answer.
  // 2) Go to slow.
  // 3) Fall through to both_loaded_as_doubles.
  // 4) Jump to lhs_not_nan.
  // In cases 3 and 4 we have found out we were dealing with a number-number
  // comparison. If VFP3 is supported the double values of the numbers have
  // been loaded into d7 and d6. Otherwise, the double values have been loaded
  // into r0, r1, r2, and r3.
  EmitSmiNonsmiComparison(masm, lhs, rhs, &lhs_not_nan, &slow, strict());

  __ bind(&both_loaded_as_doubles);
  // The arguments have been converted to doubles and stored in d6 and d7, if
  // VFP3 is supported, or in r0, r1, r2, and r3.
  __ bind(&lhs_not_nan);
  Label no_nan;
  // ARMv7 VFP3 instructions to implement double precision comparison.
  __ VFPCompareAndSetFlags(d7, d6);
  Label nan;
  __ b(vs, &nan);
  __ mov(r0, Operand(EQUAL), LeaveCC, eq);
  __ mov(r0, Operand(LESS), LeaveCC, lt);
  __ mov(r0, Operand(GREATER), LeaveCC, gt);
  __ Ret();

  __ bind(&nan);
  // If one of the sides was a NaN then the v flag is set. Load r0 with
  // whatever it takes to make the comparison fail, since comparisons with NaN
  // always fail.
  if (cc == lt || cc == le) {
    __ mov(r0, Operand(GREATER));
  } else {
    __ mov(r0, Operand(LESS));
  }
  __ Ret();

  __ bind(&not_smis);
  // At this point we know we are dealing with two different objects,
  // and neither of them is a Smi. The objects are in rhs_ and lhs_.
  if (strict()) {
    // This returns non-equal for some object types, or falls through if it
    // was not lucky.
    EmitStrictTwoHeapObjectCompare(masm, lhs, rhs);
  }

  Label check_for_internalized_strings;
  Label flat_string_check;
  // Check for heap-number-heap-number comparison. Can jump to slow case,
  // or load both doubles into r0, r1, r2, r3 and jump to the code that handles
  // that case. If the inputs are not doubles then jumps to
  // check_for_internalized_strings.
  // In this case r2 will contain the type of rhs_. Never falls through.
  EmitCheckForTwoHeapNumbers(masm,
                             lhs,
                             rhs,
                             &both_loaded_as_doubles,
                             &check_for_internalized_strings,
                             &flat_string_check);

  __ bind(&check_for_internalized_strings);
  // In the strict case the EmitStrictTwoHeapObjectCompare already took care of
  // internalized strings.
  if (cc == eq && !strict()) {
    // Returns an answer for two internalized strings or two detectable
    // objects. Otherwise jumps to string case or not both strings case.
    // Assumes that r2 is the type of rhs_ on entry.
    EmitCheckForInternalizedStringsOrObjects(
        masm, lhs, rhs, &flat_string_check, &slow);
  }

  // Check for both being sequential one-byte strings,
  // and inline if that is the case.
  __ bind(&flat_string_check);

  __ JumpIfNonSmisNotBothSequentialOneByteStrings(lhs, rhs, r2, r3, &slow);

  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r2,
                      r3);
  if (cc == eq) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, lhs, rhs, r2, r3, r4);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, r2, r3, r4,
                                                    r5);
  }
  // Never falls through to here.

  __ bind(&slow);

  if (cc == eq) {
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(lhs, rhs);
      __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
    }
    // Turn true into 0 and false into some non-zero value.
    STATIC_ASSERT(EQUAL == 0);
    __ LoadRoot(r1, Heap::kTrueValueRootIndex);
    __ sub(r0, r0, r1);
    __ Ret();
  } else {
    __ Push(lhs, rhs);
    int ncr;  // NaN compare result
    if (cc == lt || cc == le) {
      ncr = GREATER;
    } else {
      DCHECK(cc == gt || cc == ge);  // remaining cases
      ncr = LESS;
    }
    __ mov(r0, Operand(Smi::FromInt(ncr)));
    __ push(r0);

    // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
    // tagged as a small integer.
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ stm(db_w, sp, kCallerSaved | lr.bit());

  const Register scratch = r1;

  if (save_doubles()) {
    __ SaveFPRegs(sp, scratch);
  }
  const int argument_count = 1;
  const int fp_argument_count = 0;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
  __ mov(r0, Operand(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    __ RestoreFPRegs(sp, scratch);
  }
  __ ldm(ia_w, sp, kCallerSaved | pc.bit());  // Also pop pc to get Ret(0).
}


void MathPowStub::Generate(MacroAssembler* masm) {
  const Register base = r1;
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(r2));
  const Register heapnumbermap = r5;
  const Register heapnumber = r0;
  const DwVfpRegister double_base = d0;
  const DwVfpRegister double_exponent = d1;
  const DwVfpRegister double_result = d2;
  const DwVfpRegister double_scratch = d3;
  const SwVfpRegister single_scratch = s6;
  const Register scratch = r9;
  const Register scratch2 = r4;

  Label call_runtime, done, int_exponent;
  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack to double registers.
    __ ldr(base, MemOperand(sp, 1 * kPointerSize));
    __ ldr(exponent, MemOperand(sp, 0 * kPointerSize));

    __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);

    __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
    __ ldr(scratch, FieldMemOperand(base, JSObject::kMapOffset));
    __ cmp(scratch, heapnumbermap);
    __ b(ne, &call_runtime);

    __ vldr(double_base, FieldMemOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent);

    __ bind(&base_is_smi);
    __ vmov(single_scratch, scratch);
    __ vcvt_f64_s32(double_base, single_scratch);
    __ bind(&unpack_exponent);

    __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);

    __ ldr(scratch, FieldMemOperand(exponent, JSObject::kMapOffset));
    __ cmp(scratch, heapnumbermap);
    __ b(ne, &call_runtime);
    __ vldr(double_exponent,
            FieldMemOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    // Base is already in double_base.
    __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);

    __ vldr(double_exponent,
            FieldMemOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label int_exponent_convert;
    // Detect integer exponents stored as double.
    __ vcvt_u32_f64(single_scratch, double_exponent);
    // We do not check for NaN or Infinity here because comparing numbers on
    // ARM correctly distinguishes NaNs. We end up calling the built-in.
    __ vcvt_f64_u32(double_scratch, single_scratch);
    __ VFPCompareAndSetFlags(double_scratch, double_exponent);
    __ b(eq, &int_exponent_convert);

    if (exponent_type() == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label not_plus_half;

      // Test for 0.5.
      __ vmov(double_scratch, 0.5, scratch);
      __ VFPCompareAndSetFlags(double_exponent, double_scratch);
      __ b(ne, &not_plus_half);

      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      __ vmov(double_scratch, -V8_INFINITY, scratch);
      __ VFPCompareAndSetFlags(double_base, double_scratch);
      __ vneg(double_result, double_scratch, eq);
      __ b(eq, &done);

      // Add +0 to convert -0 to +0.
      __ vadd(double_scratch, double_base, kDoubleRegZero);
      __ vsqrt(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&not_plus_half);
      __ vmov(double_scratch, -0.5, scratch);
      __ VFPCompareAndSetFlags(double_exponent, double_scratch);
      __ b(ne, &call_runtime);

      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      __ vmov(double_scratch, -V8_INFINITY, scratch);
      __ VFPCompareAndSetFlags(double_base, double_scratch);
      __ vmov(double_result, kDoubleRegZero, eq);
      __ b(eq, &done);

      // Add +0 to convert -0 to +0.
      __ vadd(double_scratch, double_base, kDoubleRegZero);
      __ vmov(double_result, 1.0, scratch);
      __ vsqrt(double_scratch, double_scratch);
      __ vdiv(double_result, double_result, double_scratch);
      __ jmp(&done);
    }

    __ push(lr);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(0, 2, scratch);
      __ MovToFloatParameters(double_base, double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()),
          0, 2);
    }
    __ pop(lr);
    __ MovFromFloatResult(double_result);
    __ jmp(&done);

    __ bind(&int_exponent_convert);
    __ vcvt_u32_f64(single_scratch, double_exponent);
    __ vmov(scratch, single_scratch);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);

  // Get two copies of exponent in the registers scratch and exponent.
  if (exponent_type() == INTEGER) {
    __ mov(scratch, exponent);
  } else {
    // Exponent has previously been stored into scratch as untagged integer.
    __ mov(exponent, scratch);
  }
  __ vmov(double_scratch, double_base);  // Back up base.
  __ vmov(double_result, 1.0, scratch2);

  // Get absolute value of exponent.
  __ cmp(scratch, Operand::Zero());
  __ mov(scratch2, Operand::Zero(), LeaveCC, mi);
  __ sub(scratch, scratch2, scratch, LeaveCC, mi);

  Label while_true;
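  // Exponentiation by squaring: each iteration shifts the lowest exponent bit
  // into the carry flag; if it was set (cs) the current power of the base is
  // folded into the result, and the base is squared (ne) as long as exponent
  // bits remain.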
894 __ bind(&while_true);
895 __ mov(scratch, Operand(scratch, ASR, 1), SetCC);
896 __ vmul(double_result, double_result, double_scratch, cs);
897 __ vmul(double_scratch, double_scratch, double_scratch, ne);
898 __ b(ne, &while_true);
899
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000900 __ cmp(exponent, Operand::Zero());
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100901 __ b(ge, &done);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000902 __ vmov(double_scratch, 1.0, scratch);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100903 __ vdiv(double_result, double_scratch, double_result);
904 // Test whether result is zero. Bail out to check for subnormal result.
905 // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
906 __ VFPCompareAndSetFlags(double_result, 0.0);
907 __ b(ne, &done);
908 // double_exponent may not containe the exponent value if the input was a
909 // smi. We set it with exponent value before bailing out.
910 __ vmov(single_scratch, exponent);
911 __ vcvt_f64_s32(double_exponent, single_scratch);
912
913 // Returning or bailing out.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000914 if (exponent_type() == ON_STACK) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100915 // The arguments are still on the stack.
916 __ bind(&call_runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000917 __ TailCallRuntime(Runtime::kMathPowRT);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100918
919 // The stub is called from non-optimized code, which expects the result
920 // as heap number in exponent.
921 __ bind(&done);
922 __ AllocateHeapNumber(
923 heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
924 __ vstr(double_result,
925 FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000926 DCHECK(heapnumber.is(r0));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100927 __ Ret(2);
928 } else {
929 __ push(lr);
930 {
931 AllowExternalCallThatCantCauseGC scope(masm);
932 __ PrepareCallCFunction(0, 2, scratch);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000933 __ MovToFloatParameters(double_base, double_exponent);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100934 __ CallCFunction(
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000935 ExternalReference::power_double_double_function(isolate()),
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100936 0, 2);
937 }
938 __ pop(lr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000939 __ MovFromFloatResult(double_result);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100940
941 __ bind(&done);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100942 __ Ret();
943 }
Steve Block44f0eee2011-05-26 01:26:41 +0100944}
945
946
947bool CEntryStub::NeedsImmovableCode() {
948 return true;
949}
950
951
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000952void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
953 CEntryStub::GenerateAheadOfTime(isolate);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000954 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
955 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
956 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
957 CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000958 CreateWeakCellStub::GenerateAheadOfTime(isolate);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000959 BinaryOpICStub::GenerateAheadOfTime(isolate);
960 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000961 StoreFastElementStub::GenerateAheadOfTime(isolate);
962 TypeofStub::GenerateAheadOfTime(isolate);
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000963}
964
965
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000966void CodeStub::GenerateFPStubs(Isolate* isolate) {
967 // Generate if not already in cache.
968 SaveFPRegsMode mode = kSaveFPRegs;
969 CEntryStub(isolate, 1, mode).GetCode();
970 StoreBufferOverflowStub(isolate, mode).GetCode();
971 isolate->set_fp_stubs_generated(true);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100972}
973
974
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000975void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
976 CEntryStub stub(isolate, 1, kDontSaveFPRegs);
977 stub.GetCode();
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100978}
979
980
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000981void CEntryStub::Generate(MacroAssembler* masm) {
982 // Called from JavaScript; parameters are on stack as if calling JS function.
983 // r0: number of arguments including receiver
984 // r1: pointer to builtin function
985 // fp: frame pointer (restored after C call)
986 // sp: stack pointer (restored as callee's sp after C call)
987 // cp: current context (C callee-saved)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000988 //
989 // If argv_in_register():
990 // r2: pointer to the first argument
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000991 ProfileEntryHookStub::MaybeCallEntryHook(masm);
Ben Murdoch592a9fc2012-03-05 11:04:45 +0000992
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000993 __ mov(r5, Operand(r1));
994
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000995 if (argv_in_register()) {
996 // Move argv into the correct register.
997 __ mov(r1, Operand(r2));
998 } else {
999 // Compute the argv pointer in a callee-saved register.
1000 __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));
1001 __ sub(r1, r1, Operand(kPointerSize));
1002 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001003
1004 // Enter the exit frame that transitions from JavaScript to C++.
1005 FrameScope scope(masm, StackFrame::MANUAL);
1006 __ EnterExitFrame(save_doubles());
1007
1008 // Store a copy of argc in callee-saved registers for later.
1009 __ mov(r4, Operand(r0));
1010
1011 // r0, r4: number of arguments including receiver (C callee-saved)
1012 // r1: pointer to the first argument (C callee-saved)
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001013 // r5: pointer to builtin function (C callee-saved)
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001014
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001015 int frame_alignment = MacroAssembler::ActivationFrameAlignment();
1016 int frame_alignment_mask = frame_alignment - 1;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001017#if V8_HOST_ARCH_ARM
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001018 if (FLAG_debug_code) {
1019 if (frame_alignment > kPointerSize) {
1020 Label alignment_as_expected;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001021 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
Steve Block1e0659c2011-05-24 12:43:12 +01001022 __ tst(sp, Operand(frame_alignment_mask));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001023 __ b(eq, &alignment_as_expected);
1024 // Don't use Check here, as it will call Runtime_Abort re-entering here.
1025 __ stop("Unexpected alignment");
1026 __ bind(&alignment_as_expected);
1027 }
1028 }
1029#endif
1030
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001031 // Call C built-in.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001032 int result_stack_size;
1033 if (result_size() <= 2) {
1034 // r0 = argc, r1 = argv, r2 = isolate
1035 __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
1036 result_stack_size = 0;
1037 } else {
1038 DCHECK_EQ(3, result_size());
1039 // Allocate additional space for the result.
1040 result_stack_size =
1041 ((result_size() * kPointerSize) + frame_alignment_mask) &
1042 ~frame_alignment_mask;
1043 __ sub(sp, sp, Operand(result_stack_size));
1044
1045 // r0 = hidden result argument, r1 = argc, r2 = argv, r3 = isolate.
1046 __ mov(r3, Operand(ExternalReference::isolate_address(isolate())));
1047 __ mov(r2, Operand(r1));
1048 __ mov(r1, Operand(r0));
1049 __ mov(r0, Operand(sp));
1050 }
Steve Block44f0eee2011-05-26 01:26:41 +01001051
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001052 // To let the GC traverse the return address of the exit frames, we need to
1053 // know where the return address is. The CEntryStub is unmovable, so
1054 // we can store the address on the stack to be able to find it again and
1055 // we never have to restore it, because it will not change.
Steve Block1e0659c2011-05-24 12:43:12 +01001056 // Compute the return address in lr to return to after the jump below. Pc is
1057 // already at '+ 8' from the current instruction but return is after three
1058 // instructions so add another 4 to pc to get the return address.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001059 {
1060 // Prevent literal pool emission before return address.
1061 Assembler::BlockConstPoolScope block_const_pool(masm);
1062 __ add(lr, pc, Operand(4));
    __ str(lr, MemOperand(sp, result_stack_size));
    __ Call(r5);
  }
  if (result_size() > 2) {
    DCHECK_EQ(3, result_size());
    // Read result values stored on stack.
    __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
    __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
  }
  // Result returned in r0, r1:r0 or r2:r1:r0 - do not destroy these registers!

  __ VFPEnsureFPSCRState(r3);

  // Check result for exception sentinel.
  Label exception_returned;
  __ CompareRoot(r0, Heap::kExceptionRootIndex);
  __ b(eq, &exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    ExternalReference pending_exception_address(
        Isolate::kPendingExceptionAddress, isolate());
    __ mov(r3, Operand(pending_exception_address));
    __ ldr(r3, MemOperand(r3));
    __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
    // Cannot use check here as it attempts to generate call into runtime.
    __ b(eq, &okay);
    __ stop("Unexpected pending exception");
    __ bind(&okay);
  }

  // Exit C frame and return.
  // r0:r1: result
  // sp: stack pointer
  // fp: frame pointer
  Register argc;
  if (argv_in_register()) {
    // We don't want to pop arguments so set argc to no_reg.
    argc = no_reg;
  } else {
    // Callee-saved register r4 still holds argc.
    argc = r4;
  }
  __ LeaveExitFrame(save_doubles(), argc, true);
  __ mov(pc, lr);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address(
      Isolate::kPendingHandlerContextAddress, isolate());
  ExternalReference pending_handler_code_address(
      Isolate::kPendingHandlerCodeAddress, isolate());
  ExternalReference pending_handler_offset_address(
      Isolate::kPendingHandlerOffsetAddress, isolate());
  ExternalReference pending_handler_fp_address(
      Isolate::kPendingHandlerFPAddress, isolate());
  ExternalReference pending_handler_sp_address(
      Isolate::kPendingHandlerSPAddress, isolate());

  // Ask the runtime for help to determine the handler. This will set r0 to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
                                 isolate());
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, 0, r0);
    __ mov(r0, Operand(0));
    __ mov(r1, Operand(0));
    __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ mov(cp, Operand(pending_handler_context_address));
  __ ldr(cp, MemOperand(cp));
  __ mov(sp, Operand(pending_handler_sp_address));
  __ ldr(sp, MemOperand(sp));
  __ mov(fp, Operand(pending_handler_fp_address));
  __ ldr(fp, MemOperand(fp));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (cp == 0) for non-JS frames.
  __ cmp(cp, Operand(0));
  __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);

  // Compute the handler entry address and jump to it.
  ConstantPoolUnavailableScope constant_pool_unavailable(masm);
  __ mov(r1, Operand(pending_handler_code_address));
  __ ldr(r1, MemOperand(r1));
  __ mov(r2, Operand(pending_handler_offset_address));
  __ ldr(r2, MemOperand(r2));
  __ add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start
  if (FLAG_enable_embedded_constant_pool) {
    __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r1);
  }
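  // The add below computes, in effect,
  //   pc = pending_handler_code->instruction_start() + pending_handler_offset
  // where instruction_start() is the code object address plus
  // Code::kHeaderSize minus the heap-object tag, as already folded into r1.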
  __ add(pc, r1, r2);
}


void JSEntryStub::Generate(MacroAssembler* masm) {
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // [sp+0]: argv

  Label invoke, handler_entry, exit;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Called from C, so do not pop argc and args on exit (preserve sp)
  // No need to save register-passed args
  // Save callee-saved registers (incl. cp and fp), sp, and lr
  __ stm(db_w, sp, kCalleeSaved | lr.bit());

  // Save callee-saved vfp registers.
  __ vstm(db_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);
  // Set up the reserved register for 0.0.
  __ vmov(kDoubleRegZero, 0.0);
  __ VFPEnsureFPSCRState(r4);

  // Get address of argv, see stm above.
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc

  // Set up argv in r4.
  int offset_to_argv = (kNumCalleeSaved + 1) * kPointerSize;
  offset_to_argv += kNumDoubleCalleeSaved * kDoubleSize;
  __ ldr(r4, MemOperand(sp, offset_to_argv));

  // Push a frame with special values setup to mark it as an entry frame.
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  int marker = type();
  if (FLAG_enable_embedded_constant_pool) {
    __ mov(r8, Operand::Zero());
  }
  __ mov(r7, Operand(Smi::FromInt(marker)));
  __ mov(r6, Operand(Smi::FromInt(marker)));
  __ mov(r5,
         Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  __ ldr(r5, MemOperand(r5));
  __ mov(ip, Operand(-1));  // Push a bad frame pointer to fail if it is used.
  __ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() |
                       (FLAG_enable_embedded_constant_pool ? r8.bit() : 0) |
                       ip.bit());
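  // After the stm above the entry frame looks roughly like this (a sketch
  // with the optional constant pool slot omitted; nothing should rely on
  // these offsets):
  //   sp + 3 * kPointerSize : ip  (-1, the bad frame pointer)
  //   sp + 2 * kPointerSize : r7  (frame type marker, as a Smi)
  //   sp + 1 * kPointerSize : r6  (frame type marker, as a Smi)
  //   sp + 0 * kPointerSize : r5  (saved C entry FP)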
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001218
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001219 // Set up frame pointer for the frame to be pushed.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001220 __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
1221
Ben Murdochb0fe1622011-05-05 13:52:32 +01001222 // If this is the outermost JS call, set js_entry_sp value.
Steve Block053d10c2011-06-13 19:13:29 +01001223 Label non_outermost_js;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001224 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001225 __ mov(r5, Operand(ExternalReference(js_entry_sp)));
1226 __ ldr(r6, MemOperand(r5));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001227 __ cmp(r6, Operand::Zero());
Steve Block053d10c2011-06-13 19:13:29 +01001228 __ b(ne, &non_outermost_js);
1229 __ str(fp, MemOperand(r5));
1230 __ mov(ip, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
1231 Label cont;
1232 __ b(&cont);
1233 __ bind(&non_outermost_js);
1234 __ mov(ip, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
1235 __ bind(&cont);
1236 __ push(ip);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001237
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001238 // Jump to a faked try block that does the invoke, with a faked catch
1239 // block that sets the pending exception.
1240 __ jmp(&invoke);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001241
1242 // Block literal pool emission whilst taking the position of the handler
1243 // entry. This avoids making the assumption that literal pools are always
1244 // emitted after an instruction is emitted, rather than before.
1245 {
1246 Assembler::BlockConstPoolScope block_const_pool(masm);
1247 __ bind(&handler_entry);
1248 handler_offset_ = handler_entry.pos();
1249 // Caught exception: Store result (exception) in the pending exception
1250 // field in the JSEnv and return a failure sentinel. Coming in here the
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001251 // fp will be invalid because the PushStackHandler below sets it to 0 to
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001252 // signal the existence of the JSEntry frame.
1253 __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1254 isolate())));
1255 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001256 __ str(r0, MemOperand(ip));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001257 __ LoadRoot(r0, Heap::kExceptionRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001258 __ b(&exit);
1259
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001260 // Invoke: Link this frame into the handler chain.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001261 __ bind(&invoke);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001262 // Must preserve r0-r4, r5-r6 are available.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001263 __ PushStackHandler();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001264 // If an exception not caught by another handler occurs, this handler
1265 // returns control to the code after the bl(&invoke) above, which
1266 // restores all kCalleeSaved registers (including cp and fp) to their
1267 // saved values before returning a failure to C.
1268
1269 // Clear any pending exceptions.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001270 __ mov(r5, Operand(isolate()->factory()->the_hole_value()));
Ben Murdoch589d6972011-11-30 16:04:58 +00001271 __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001272 isolate())));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001273 __ str(r5, MemOperand(ip));
1274
1275 // Invoke the function by calling through JS entry trampoline builtin.
1276 // Notice that we cannot store a reference to the trampoline code directly in
1277 // this stub, because runtime stubs are not traversed when doing GC.
1278
1279 // Expected registers by Builtins::JSEntryTrampoline
1280 // r0: code entry
1281 // r1: function
1282 // r2: receiver
1283 // r3: argc
1284 // r4: argv
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001285 if (type() == StackFrame::ENTRY_CONSTRUCT) {
Steve Block44f0eee2011-05-26 01:26:41 +01001286 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001287 isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001288 __ mov(ip, Operand(construct_entry));
1289 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001290 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001291 __ mov(ip, Operand(entry));
1292 }
1293 __ ldr(ip, MemOperand(ip)); // deref address
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001294 __ add(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001295
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001296 // Branch and link to JSEntryTrampoline.
1297 __ Call(ip);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001298
Steve Block053d10c2011-06-13 19:13:29 +01001299 // Unlink this frame from the handler chain.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001300 __ PopStackHandler();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001301
1302 __ bind(&exit); // r0 holds result
Steve Block053d10c2011-06-13 19:13:29 +01001303 // Check if the current stack frame is marked as the outermost JS frame.
1304 Label non_outermost_js_2;
1305 __ pop(r5);
1306 __ cmp(r5, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
1307 __ b(ne, &non_outermost_js_2);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001308 __ mov(r6, Operand::Zero());
Steve Block053d10c2011-06-13 19:13:29 +01001309 __ mov(r5, Operand(ExternalReference(js_entry_sp)));
1310 __ str(r6, MemOperand(r5));
1311 __ bind(&non_outermost_js_2);
Steve Block053d10c2011-06-13 19:13:29 +01001312
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001313 // Restore the top frame descriptors from the stack.
1314 __ pop(r3);
Steve Block44f0eee2011-05-26 01:26:41 +01001315 __ mov(ip,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001316 Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001317 __ str(r3, MemOperand(ip));
1318
1319 // Reset the stack to the callee saved registers.
1320 __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
1321
1322 // Restore callee-saved registers and return.
1323#ifdef DEBUG
1324 if (FLAG_debug_code) {
1325 __ mov(lr, Operand(pc));
1326 }
1327#endif
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01001328
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001329 // Restore callee-saved vfp registers.
1330 __ vldm(ia_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01001331
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001332 __ ldm(ia_w, sp, kCalleeSaved | pc.bit());
1333}
1334
1335
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001336void InstanceOfStub::Generate(MacroAssembler* masm) {
1337 Register const object = r1; // Object (lhs).
1338 Register const function = r0; // Function (rhs).
1339 Register const object_map = r2; // Map of {object}.
1340 Register const function_map = r3; // Map of {function}.
1341 Register const function_prototype = r4; // Prototype of {function}.
1342 Register const scratch = r5;
Steve Block1e0659c2011-05-24 12:43:12 +01001343
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001344 DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
1345 DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
Steve Block1e0659c2011-05-24 12:43:12 +01001346
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001347 // Check if {object} is a smi.
1348 Label object_is_smi;
1349 __ JumpIfSmi(object, &object_is_smi);
Steve Block1e0659c2011-05-24 12:43:12 +01001350
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001351 // Lookup the {function} and the {object} map in the global instanceof cache.
1352 // Note: This is safe because we clear the global instanceof cache whenever
1353 // we change the prototype of any object.
1354 Label fast_case, slow_case;
1355 __ ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
1356 __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
1357 __ b(ne, &fast_case);
1358 __ CompareRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
1359 __ b(ne, &fast_case);
1360 __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
1361 __ Ret();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001362
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001363 // If {object} is a smi we can safely return false if {function} is a JS
1364 // function, otherwise we have to miss to the runtime and throw an exception.
1365 __ bind(&object_is_smi);
1366 __ JumpIfSmi(function, &slow_case);
1367 __ CompareObjectType(function, function_map, scratch, JS_FUNCTION_TYPE);
1368 __ b(ne, &slow_case);
1369 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
1370 __ Ret();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001371
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001372 // Fast-case: The {function} must be a valid JSFunction.
1373 __ bind(&fast_case);
1374 __ JumpIfSmi(function, &slow_case);
1375 __ CompareObjectType(function, function_map, scratch, JS_FUNCTION_TYPE);
1376 __ b(ne, &slow_case);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001377
Ben Murdochda12d292016-06-02 14:46:10 +01001378 // Go to the runtime if the function is not a constructor.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001379 __ ldrb(scratch, FieldMemOperand(function_map, Map::kBitFieldOffset));
Ben Murdochda12d292016-06-02 14:46:10 +01001380 __ tst(scratch, Operand(1 << Map::kIsConstructor));
1381 __ b(eq, &slow_case);
1382
1383 // Ensure that {function} has an instance prototype.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001384 __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
1385 __ b(ne, &slow_case);
Steve Block1e0659c2011-05-24 12:43:12 +01001386
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001387 // Get the "prototype" (or initial map) of the {function}.
1388 __ ldr(function_prototype,
1389 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1390 __ AssertNotSmi(function_prototype);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001391
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001392 // Resolve the prototype if the {function} has an initial map. Afterwards the
1393 // {function_prototype} will be either the JSReceiver prototype object or the
1394 // hole value, which means that no instances of the {function} were created so
1395 // far and hence we should return false.
1396 Label function_prototype_valid;
1397 __ CompareObjectType(function_prototype, scratch, scratch, MAP_TYPE);
1398 __ b(ne, &function_prototype_valid);
1399 __ ldr(function_prototype,
1400 FieldMemOperand(function_prototype, Map::kPrototypeOffset));
1401 __ bind(&function_prototype_valid);
1402 __ AssertNotSmi(function_prototype);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001403
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001404 // Update the global instanceof cache with the current {object} map and
1405 // {function}. The cached answer will be set when it is known below.
1406 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
1407 __ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
Steve Block1e0659c2011-05-24 12:43:12 +01001408
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001409 // Loop through the prototype chain looking for the {function} prototype.
1410 // Assume true, and change to false if not found.
1411 Register const object_instance_type = function_map;
1412 Register const map_bit_field = function_map;
1413 Register const null = scratch;
1414 Register const result = r0;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001415
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001416 Label done, loop, fast_runtime_fallback;
1417 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1418 __ LoadRoot(null, Heap::kNullValueRootIndex);
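  // The loop below walks the prototype chain; in pseudo-code (a sketch of the
  // fast path only, the runtime fallbacks are handled out of line):
  //   while (true) {
  //     if (object_map needs access check || object is a JSProxy) goto runtime;
  //     object = object_map->prototype;
  //     if (object == function_prototype) break;        // result stays true
  //     if (object == null) { result = false; break; }
  //     object_map = object->map;
  //   }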
  __ bind(&loop);

  // Check if the object needs to be access checked.
  __ ldrb(map_bit_field, FieldMemOperand(object_map, Map::kBitFieldOffset));
  __ tst(map_bit_field, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &fast_runtime_fallback);
  // Check if the current object is a Proxy.
  __ CompareInstanceType(object_map, object_instance_type, JS_PROXY_TYPE);
  __ b(eq, &fast_runtime_fallback);

  __ ldr(object, FieldMemOperand(object_map, Map::kPrototypeOffset));
  __ cmp(object, function_prototype);
  __ b(eq, &done);
  __ cmp(object, null);
  __ ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
  __ b(ne, &loop);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ StoreRoot(result, Heap::kInstanceofCacheAnswerRootIndex);
  __ Ret();

  // Found Proxy or access check needed: Call the runtime
  __ bind(&fast_runtime_fallback);
  __ Push(object, function_prototype);
  // Invalidate the instanceof cache.
  __ Move(scratch, Smi::FromInt(0));
  __ StoreRoot(scratch, Heap::kInstanceofCacheFunctionRootIndex);
  __ TailCallRuntime(Runtime::kHasInPrototypeChain);

  // Slow-case: Call the %InstanceOf runtime function.
  __ bind(&slow_case);
  __ Push(object, function);
  __ TailCallRuntime(is_es6_instanceof() ? Runtime::kOrdinaryHasInstance
                                         : Runtime::kInstanceOf);
}


void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // Ensure that the vector and slot registers won't be clobbered before
  // calling the miss handler.
  DCHECK(!AreAliased(r4, r5, LoadWithVectorDescriptor::VectorRegister(),
                     LoadWithVectorDescriptor::SlotRegister()));

  NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r4,
                                                          r5, &miss);
  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}


void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is in lr.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = r5;
  Register result = r0;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
         result.is(LoadWithVectorDescriptor::SlotRegister()));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.
  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX,
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Jump straight to the runtime if native RegExp support is not selected at
  // compile time, or if the regexp entry in generated code is turned off by a
  // runtime switch or at compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  sp[0]: last_match_info (expected JSArray)
  //  sp[4]: previous index
  //  sp[8]: subject string
  //  sp[12]: JSRegExp object

  const int kLastMatchInfoOffset = 0 * kPointerSize;
  const int kPreviousIndexOffset = 1 * kPointerSize;
  const int kSubjectOffset = 2 * kPointerSize;
  const int kJSRegExpOffset = 3 * kPointerSize;

  Label runtime;
  // Allocation of registers for this function. These are in callee save
  // registers and will be preserved by the call to the native RegExp code, as
  // this code is called using the normal C calling convention. When calling
  // directly from generated code the native RegExp code will not do a GC and
  // therefore the content of these registers are safe to use after the call.
  Register subject = r4;
  Register regexp_data = r5;
  Register last_match_info_elements = no_reg;  // will be r6;

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ mov(r0, Operand(address_of_regexp_stack_memory_size));
  __ ldr(r0, MemOperand(r0, 0));
  __ cmp(r0, Operand::Zero());
  __ b(eq, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ ldr(r0, MemOperand(sp, kJSRegExpOffset));
  __ JumpIfSmi(r0, &runtime);
  __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
  __ b(ne, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ SmiTst(regexp_data);
    __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE);
    __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // regexp_data: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ ldr(r0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
  __ cmp(r0, Operand(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ b(ne, &runtime);

  // regexp_data: RegExp data (FixedArray)
  // Check that the number of captures fit in the static offsets vector buffer.
  __ ldr(r2,
         FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or          number_of_captures * 2 <= offsets vector size - 2
  // Multiplying by 2 comes for free since r2 is smi-tagged.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmp(r2, Operand(Isolate::kJSRegexpStaticOffsetsVectorSize - 2));
  __ b(hi, &runtime);

  // Reset offset for possibly sliced string.
  __ mov(r9, Operand::Zero());
  __ ldr(subject, MemOperand(sp, kSubjectOffset));
  __ JumpIfSmi(subject, &runtime);
  __ mov(r3, subject);  // Make a copy of the original subject string.
  // subject: subject string
  // r3: subject string
  // regexp_data: RegExp data (FixedArray)
  // Handle subject string according to its encoding and representation:
  // (1) Sequential string? If yes, go to (4).
  // (2) Sequential or cons? If not, go to (5).
  // (3) Cons string. If the string is flat, replace subject with first string
  //     and go to (1). Otherwise bail out to runtime.
  // (4) Sequential string. Load regexp code according to encoding.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (5) Long external string? If not, go to (7).
  // (6) External string. Make it, offset-wise, look like a sequential string.
  //     Go to (4).
  // (7) Short external string or not a string? If yes, bail out to runtime.
  // (8) Sliced string. Replace subject with parent. Go to (1).

  Label seq_string /* 4 */, external_string /* 6 */, check_underlying /* 1 */,
      not_seq_nor_cons /* 5 */, not_long_external /* 7 */;

  __ bind(&check_underlying);
  __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
  __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));

  // (1) Sequential string? If yes, go to (4).
  __ and_(r1,
          r0,
          Operand(kIsNotStringMask |
                  kStringRepresentationMask |
                  kShortExternalStringMask),
          SetCC);
  STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
  __ b(eq, &seq_string);  // Go to (4).

  // (2) Sequential or cons? If not, go to (5).
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmp(r1, Operand(kExternalStringTag));
  __ b(ge, &not_seq_nor_cons);  // Go to (5).

  // (3) Cons string. Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ ldr(r0, FieldMemOperand(subject, ConsString::kSecondOffset));
  __ CompareRoot(r0, Heap::kempty_stringRootIndex);
  __ b(ne, &runtime);
  __ ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
  __ jmp(&check_underlying);

  // (4) Sequential string. Load regexp code according to encoding.
  __ bind(&seq_string);
  // subject: sequential subject string (or look-alike, external string)
  // r3: original subject string
  // Load previous index and check range before r3 is overwritten. We have to
  // use r3 instead of subject here because subject might have been only made
  // to look like a sequential string when it actually is an external string.
  __ ldr(r1, MemOperand(sp, kPreviousIndexOffset));
  __ JumpIfNotSmi(r1, &runtime);
  __ ldr(r3, FieldMemOperand(r3, String::kLengthOffset));
  __ cmp(r3, Operand(r1));
  __ b(ls, &runtime);
  __ SmiUntag(r1);

  STATIC_ASSERT(4 == kOneByteStringTag);
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ and_(r0, r0, Operand(kStringEncodingMask));
  __ mov(r3, Operand(r0, ASR, 2), SetCC);
  __ ldr(r6, FieldMemOperand(regexp_data, JSRegExp::kDataOneByteCodeOffset),
         ne);
  __ ldr(r6, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset), eq);
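  // Note on the two loads above: after the and_ and the arithmetic shift, r3
  // is 1 for one-byte strings (kOneByteStringTag == 4, shifted right by 2)
  // and 0 for two-byte strings, and the condition codes select the matching
  // code object for r6.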

  // (E) Carry on. String handling is done.
  // r6: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains a smi (code flushing support).
  __ JumpIfSmi(r6, &runtime);

  // r1: previous index
  // r3: encoding of subject string (1 if one_byte, 0 if two_byte);
  // r6: code
  // subject: Subject string
  // regexp_data: RegExp data (FixedArray)
  // All checks done. Now push arguments for native regexp code.
  __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1, r0, r2);

  // Isolates: note we add an additional parameter here (isolate pointer).
  const int kRegExpExecuteArguments = 9;
  const int kParameterRegisters = 4;
  __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);

  // Stack pointer now points to cell where return address is to be written.
  // Arguments are before that on the stack or in registers.

  // Argument 9 (sp[20]): Pass current isolate address.
  __ mov(r0, Operand(ExternalReference::isolate_address(isolate())));
  __ str(r0, MemOperand(sp, 5 * kPointerSize));

  // Argument 8 (sp[16]): Indicate that this is a direct call from JavaScript.
  __ mov(r0, Operand(1));
  __ str(r0, MemOperand(sp, 4 * kPointerSize));

  // Argument 7 (sp[12]): Start (high end) of backtracking stack memory area.
  __ mov(r0, Operand(address_of_regexp_stack_memory_address));
  __ ldr(r0, MemOperand(r0, 0));
  __ mov(r2, Operand(address_of_regexp_stack_memory_size));
  __ ldr(r2, MemOperand(r2, 0));
  __ add(r0, r0, Operand(r2));
  __ str(r0, MemOperand(sp, 3 * kPointerSize));

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global. This does not affect non-global regexps.
  __ mov(r0, Operand::Zero());
  __ str(r0, MemOperand(sp, 2 * kPointerSize));

  // Argument 5 (sp[4]): static offsets vector buffer.
  __ mov(r0,
         Operand(ExternalReference::address_of_static_offsets_vector(
             isolate())));
  __ str(r0, MemOperand(sp, 1 * kPointerSize));

  // For arguments 4 and 3 get string length, calculate start of string data
  // and calculate the shift of the index (0 for one-byte and 1 for two-byte).
  __ add(r7, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag));
  __ eor(r3, r3, Operand(1));
  // Load the length from the original subject string from the previous stack
  // frame. Therefore we have to use fp, which points exactly to two pointer
  // sizes below the previous sp. (Because creating a new stack frame pushes
  // the previous fp onto the stack and moves up sp by 2 * kPointerSize.)
  __ ldr(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
  // If slice offset is not 0, load the length from the original sliced string.
  // Argument 4, r3: End of string data
  // Argument 3, r2: Start of string data
  // Prepare start and end index of the input.
  __ add(r9, r7, Operand(r9, LSL, r3));
  __ add(r2, r9, Operand(r1, LSL, r3));

  __ ldr(r7, FieldMemOperand(subject, String::kLengthOffset));
  __ SmiUntag(r7);
  __ add(r3, r9, Operand(r7, LSL, r3));

  // Argument 2 (r1): Previous index.
  // Already there

  // Argument 1 (r0): Subject string.
  __ mov(r0, subject);
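  // Taken together, the native RegExp code is entered roughly like a C
  // function receiving
  //   (subject, previous_index, input_start, input_end, offsets_vector,
  //    output_size, stack_base, direct_call, isolate)
  // with the first four arguments in r0-r3 and the rest in the stack slots
  // written above. This is only a summary of the stores above; the
  // authoritative calling convention is the one defined by the ARM RegExp
  // macro assembler.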

  // Locate the code entry and call it.
  __ add(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
  DirectCEntryStub stub(isolate());
  stub.GenerateCall(masm, r6);

  __ LeaveExitFrame(false, no_reg, true);

  last_match_info_elements = r6;

  // r0: result
  // subject: subject string (callee saved)
  // regexp_data: RegExp data (callee saved)
  // last_match_info_elements: Last match info elements (callee saved)
  // Check the result.
  Label success;
  __ cmp(r0, Operand(1));
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ b(eq, &success);
  Label failure;
  __ cmp(r0, Operand(NativeRegExpMacroAssembler::FAILURE));
  __ b(eq, &failure);
  __ cmp(r0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
  // If not exception it can only be retry. Handle that in the runtime system.
  __ b(ne, &runtime);
  // Result must now be exception. If there is no pending exception already, a
  // stack overflow (on the backtrack stack) was detected in RegExp code but
  // the exception has not been created yet. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  __ mov(r1, Operand(isolate()->factory()->the_hole_value()));
  __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
                                       isolate())));
  __ ldr(r0, MemOperand(r2, 0));
  __ cmp(r0, r1);
  __ b(eq, &runtime);

  // For exception, throw the exception again.
  __ TailCallRuntime(Runtime::kRegExpExecReThrow);

  __ bind(&failure);
  // For failure and exception return null.
  __ mov(r0, Operand(isolate()->factory()->null_value()));
  __ add(sp, sp, Operand(4 * kPointerSize));
  __ Ret();

  // Process the result from the native regexp code.
  __ bind(&success);
  __ ldr(r1,
         FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  // Multiplying by 2 comes for free since r1 is smi-tagged.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ add(r1, r1, Operand(2));  // r1 was a smi.
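  // For example, a regexp with one capture group has number_of_captures == 1,
  // so r1 held 2 (the smi encoding of 1); adding 2 yields 4 registers, i.e.
  // start/end offsets of the full match plus start/end offsets of the capture.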

  __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset));
  __ JumpIfSmi(r0, &runtime);
  __ CompareObjectType(r0, r2, r2, JS_ARRAY_TYPE);
  __ b(ne, &runtime);
  // Check that the JSArray is in fast case.
  __ ldr(last_match_info_elements,
         FieldMemOperand(r0, JSArray::kElementsOffset));
  __ ldr(r0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset));
  __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex);
  __ b(ne, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information.
  __ ldr(r0,
         FieldMemOperand(last_match_info_elements, FixedArray::kLengthOffset));
  __ add(r2, r1, Operand(RegExpImpl::kLastMatchOverhead));
  __ cmp(r2, Operand::SmiUntag(r0));
  __ b(gt, &runtime);

  // r1: number of capture registers
  // r4: subject string
  // Store the capture count.
  __ SmiTag(r2, r1);
  __ str(r2, FieldMemOperand(last_match_info_elements,
                             RegExpImpl::kLastCaptureCountOffset));
  // Store last subject and last input.
  __ str(subject,
         FieldMemOperand(last_match_info_elements,
                         RegExpImpl::kLastSubjectOffset));
  __ mov(r2, subject);
  __ RecordWriteField(last_match_info_elements,
                      RegExpImpl::kLastSubjectOffset,
                      subject,
                      r3,
                      kLRHasNotBeenSaved,
                      kDontSaveFPRegs);
  __ mov(subject, r2);
  __ str(subject,
         FieldMemOperand(last_match_info_elements,
                         RegExpImpl::kLastInputOffset));
  __ RecordWriteField(last_match_info_elements,
                      RegExpImpl::kLastInputOffset,
                      subject,
                      r3,
                      kLRHasNotBeenSaved,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(isolate());
  __ mov(r2, Operand(address_of_static_offsets_vector));

  // r1: number of capture registers
  // r2: offsets vector
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ add(r0,
         last_match_info_elements,
         Operand(RegExpImpl::kFirstCaptureOffset - kHeapObjectTag));
  __ bind(&next_capture);
  __ sub(r1, r1, Operand(1), SetCC);
  __ b(mi, &done);
  // Read the value from the static offsets vector buffer.
  __ ldr(r3, MemOperand(r2, kPointerSize, PostIndex));
  // Store the smi value in the last match info.
  __ SmiTag(r3);
  __ str(r3, MemOperand(r0, kPointerSize, PostIndex));
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset));
  __ add(sp, sp, Operand(4 * kPointerSize));
  __ Ret();

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec);

  // Deferred code for string handling.
  // (5) Long external string? If not, go to (7).
  __ bind(&not_seq_nor_cons);
  // Compare flags are still set.
  __ b(gt, &not_long_external);  // Go to (7).

  // (6) External string. Make it, offset-wise, look like a sequential string.
  __ bind(&external_string);
  __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
  __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ tst(r0, Operand(kIsIndirectStringMask));
    __ Assert(eq, kExternalStringExpectedButNotFound);
  }
  __ ldr(subject,
         FieldMemOperand(subject, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ sub(subject,
         subject,
         Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ jmp(&seq_string);  // Go to (4).

  // (7) Short external string or not a string? If yes, bail out to runtime.
  __ bind(&not_long_external);
  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
  __ tst(r1, Operand(kIsNotStringMask | kShortExternalStringMask));
  __ b(ne, &runtime);

  // (8) Sliced string. Replace subject with parent. Go to (1).
  // Load offset into r9 and replace subject string with parent.
  __ ldr(r9, FieldMemOperand(subject, SlicedString::kOffsetOffset));
  __ SmiUntag(r9);
  __ ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset));
  __ jmp(&check_underlying);  // Go to (1).
#endif  // V8_INTERPRETED_REGEXP
}


static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
  // r0 : number of arguments to the construct function
  // r1 : the function to call
  // r2 : feedback vector
  // r3 : slot in feedback vector (Smi)
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

  // Number-of-arguments register must be smi-tagged to call out.
  __ SmiTag(r0);
  __ Push(r3, r2, r1, r0);

  __ CallStub(stub);

  __ Pop(r3, r2, r1, r0);
  __ SmiUntag(r0);
}


static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a feedback vector slot. Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // r0 : number of arguments to the construct function
  // r1 : the function to call
  // r2 : feedback vector
  // r3 : slot in feedback vector (Smi)
  Label initialize, done, miss, megamorphic, not_array_function;

  DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
            masm->isolate()->heap()->megamorphic_symbol());
  DCHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(masm->isolate()),
            masm->isolate()->heap()->uninitialized_symbol());
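  // In effect the slot moves through these states (a summary of the checks
  // below, not an exhaustive list):
  //   uninitialized symbol -> WeakCell holding the function, for ordinary
  //                           targets,
  //   uninitialized symbol -> AllocationSite, when the target is the Array()
  //                           function,
  //   any state that no longer matches -> megamorphic symbol.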

  // Load the cache state into r5.
  __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
  __ ldr(r5, FieldMemOperand(r5, FixedArray::kHeaderSize));

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  // We don't know if r5 is a WeakCell or a Symbol, but it's harmless to read at
  // this position in a symbol (see static asserts in type-feedback-vector.h).
  Label check_allocation_site;
  Register feedback_map = r6;
  Register weak_value = r9;
  __ ldr(weak_value, FieldMemOperand(r5, WeakCell::kValueOffset));
  __ cmp(r1, weak_value);
  __ b(eq, &done);
  __ CompareRoot(r5, Heap::kmegamorphic_symbolRootIndex);
  __ b(eq, &done);
  __ ldr(feedback_map, FieldMemOperand(r5, HeapObject::kMapOffset));
  __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex);
  __ b(ne, &check_allocation_site);

  // If the weak cell is cleared, we have a new chance to become monomorphic.
  __ JumpIfSmi(weak_value, &initialize);
  __ jmp(&megamorphic);

  __ bind(&check_allocation_site);
  // If we came here, we need to see if we are the array function.
  // If we didn't have a matching function, and we didn't find the megamorph
  // sentinel, then we have in the slot either some other function or an
  // AllocationSite.
  __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex);
  __ b(ne, &miss);

  // Make sure the function is the Array() function
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
  __ cmp(r1, r5);
  __ b(ne, &megamorphic);
  __ jmp(&done);

  __ bind(&miss);

  // A monomorphic miss (i.e., here the cache is not uninitialized) goes
  // megamorphic.
  __ CompareRoot(r5, Heap::kuninitialized_symbolRootIndex);
  __ b(eq, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
  __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex);
  __ str(ip, FieldMemOperand(r5, FixedArray::kHeaderSize));
  __ jmp(&done);

  // An uninitialized cache is patched with the function
  __ bind(&initialize);

  // Make sure the function is the Array() function
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
  __ cmp(r1, r5);
  __ b(ne, &not_array_function);

  // The target function is the Array constructor,
  // Create an AllocationSite if we don't already have it, store it in the
  // slot.
  CreateAllocationSiteStub create_stub(masm->isolate());
  CallStubInRecordCallTarget(masm, &create_stub);
  __ b(&done);

  __ bind(&not_array_function);
  CreateWeakCellStub weak_cell_stub(masm->isolate());
  CallStubInRecordCallTarget(masm, &weak_cell_stub);
  __ bind(&done);
}


void CallConstructStub::Generate(MacroAssembler* masm) {
  // r0 : number of arguments
  // r1 : the function to call
  // r2 : feedback vector
  // r3 : slot in feedback vector (Smi, for RecordCallTarget)

  Label non_function;
  // Check that the function is not a smi.
  __ JumpIfSmi(r1, &non_function);
  // Check that the function is a JSFunction.
  __ CompareObjectType(r1, r5, r5, JS_FUNCTION_TYPE);
  __ b(ne, &non_function);

  GenerateRecordCallTarget(masm);

  __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
  Label feedback_register_initialized;
  // Put the AllocationSite from the feedback vector into r2, or undefined.
  __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize));
  __ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset));
  __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
  __ b(eq, &feedback_register_initialized);
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  __ bind(&feedback_register_initialized);

  __ AssertUndefinedOrAllocationSite(r2, r5);

  // Pass function as new target.
  __ mov(r3, r1);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset));
  __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag));

  __ bind(&non_function);
  __ mov(r3, r1);
  __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}


void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
  // r1 - function
  // r3 - slot id
  // r2 - vector
  // r4 - allocation site (loaded from vector[slot])
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
  __ cmp(r1, r5);
  __ b(ne, miss);

  __ mov(r0, Operand(arg_count()));

  // Increment the call count for monomorphic function calls.
  __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize));
  __ ldr(r3, FieldMemOperand(r2, 0));
  __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
  __ str(r3, FieldMemOperand(r2, 0));

  __ mov(r2, r4);
  __ mov(r3, r1);
  ArrayConstructorStub stub(masm->isolate(), arg_count());
  __ TailCallStub(&stub);
}


void CallICStub::Generate(MacroAssembler* masm) {
  // r1 - function
  // r3 - slot id (Smi)
  // r2 - vector
  Label extra_checks_or_miss, call, call_function;
  int argc = arg_count();
  ParameterCount actual(argc);

  // The checks. First, does r1 match the recorded monomorphic target?
  __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
  __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize));

  // We don't know that we have a weak cell. We might have a private symbol
  // or an AllocationSite, but the memory is safe to examine.
  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
  // FixedArray.
  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
  // computed, meaning that it can't appear to be a pointer. If the low bit is
  // 0, then hash is computed, but the 0 bit prevents the field from appearing
  // to be a pointer.
  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
                    WeakCell::kValueOffset &&
                WeakCell::kValueOffset == Symbol::kHashFieldSlot);

  __ ldr(r5, FieldMemOperand(r4, WeakCell::kValueOffset));
  __ cmp(r1, r5);
  __ b(ne, &extra_checks_or_miss);

  // The compare above could have been a SMI/SMI comparison. Guard against this
  // convincing us that we have a monomorphic JSFunction.
  __ JumpIfSmi(r1, &extra_checks_or_miss);

  // Increment the call count for monomorphic function calls.
  __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize));
  __ ldr(r3, FieldMemOperand(r2, 0));
  __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
  __ str(r3, FieldMemOperand(r2, 0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002122
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002123 __ bind(&call_function);
2124 __ mov(r0, Operand(argc));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002125 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
2126 tail_call_mode()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002127 RelocInfo::CODE_TARGET);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002128
2129 __ bind(&extra_checks_or_miss);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002130 Label uninitialized, miss, not_allocation_site;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002131
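  // If the feedback is already the megamorphic sentinel, go straight to the
  // generic call path.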
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002132 __ CompareRoot(r4, Heap::kmegamorphic_symbolRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002133 __ b(eq, &call);
2134
2135 // Verify that r4 contains an AllocationSite
2136 __ ldr(r5, FieldMemOperand(r4, HeapObject::kMapOffset));
2137 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
2138 __ b(ne, &not_allocation_site);
2139
2140 // We have an allocation site.
2141 HandleArrayCase(masm, &miss);
2142
2143 __ bind(&not_allocation_site);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002144
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002145 // The following cases attempt to handle MISS cases without going to the
2146 // runtime.
2147 if (FLAG_trace_ic) {
2148 __ jmp(&miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002149 }
2150
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002151 __ CompareRoot(r4, Heap::kuninitialized_symbolRootIndex);
2152 __ b(eq, &uninitialized);
2153
2154 // We are going megamorphic. If the feedback is a JSFunction, it is fine
2155 // to handle it here. More complex cases are dealt with in the runtime.
2156 __ AssertNotSmi(r4);
2157 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
2158 __ b(ne, &miss);
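  // Transition the feedback slot to the megamorphic sentinel.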
2159 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
2160 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex);
2161 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002162
2163 __ bind(&call);
2164 __ mov(r0, Operand(argc));
Ben Murdoch097c5b22016-05-18 11:27:45 +01002165 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002166 RelocInfo::CODE_TARGET);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002167
2168 __ bind(&uninitialized);
2169
2170 // We are going monomorphic, provided we actually have a JSFunction.
2171 __ JumpIfSmi(r1, &miss);
2172
2173 // Goto miss case if we do not have a function.
2174 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE);
2175 __ b(ne, &miss);
2176
2177 // Make sure the function is not the Array() function, which requires special
2178 // behavior on MISS.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002179 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r4);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002180 __ cmp(r1, r4);
2181 __ b(eq, &miss);
2182
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002183 // Make sure the function belongs to the same native context.
2184 __ ldr(r4, FieldMemOperand(r1, JSFunction::kContextOffset));
2185 __ ldr(r4, ContextMemOperand(r4, Context::NATIVE_CONTEXT_INDEX));
2186 __ ldr(ip, NativeContextMemOperand());
2187 __ cmp(r4, ip);
2188 __ b(ne, &miss);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002189
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002190 // Initialize the call counter.
2191 __ Move(r5, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002192 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002193 __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + kPointerSize));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002194
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002195 // Store the function. Use a stub since we need a frame for allocation.
2196 // r2 - vector
2197 // r3 - slot
2198 // r1 - function
2199 {
2200 FrameScope scope(masm, StackFrame::INTERNAL);
2201 CreateWeakCellStub create_stub(masm->isolate());
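    // Preserve the function (r1) across the stub call; the stub allocates and
    // may therefore trigger a GC.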
2202 __ Push(r1);
2203 __ CallStub(&create_stub);
2204 __ Pop(r1);
2205 }
2206
2207 __ jmp(&call_function);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002208
2209 // We are here because tracing is on or we encountered a MISS case we can't
2210 // handle here.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002211 __ bind(&miss);
2212 GenerateMiss(masm);
2213
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002214 __ jmp(&call);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002215}
2216
2217
2218void CallICStub::GenerateMiss(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002219 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002220
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002221 // Push the function, the feedback vector, and the slot id.
2222 __ Push(r1, r2, r3);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002223
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002224 // Call the entry.
2225 __ CallRuntime(Runtime::kCallIC_Miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002226
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002227 // Move the result to r1 and exit the internal frame.
2228 __ mov(r1, r0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002229}
2230
2231
2232// StringCharCodeAtGenerator
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002233void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002234 // If the receiver is a smi, trigger the non-string case.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002235 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
2236 __ JumpIfSmi(object_, receiver_not_string_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002237
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002238 // Fetch the instance type of the receiver into result register.
2239 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
2240 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
2241 // If the receiver is not a string, trigger the non-string case.
2242 __ tst(result_, Operand(kIsNotStringMask));
2243 __ b(ne, receiver_not_string_);
2244 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002245
2246 // If the index is not a smi, trigger the non-smi case.
Steve Block1e0659c2011-05-24 12:43:12 +01002247 __ JumpIfNotSmi(index_, &index_not_smi_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002248 __ bind(&got_smi_index_);
2249
2250 // Check for index out of range.
2251 __ ldr(ip, FieldMemOperand(object_, String::kLengthOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002252 __ cmp(ip, Operand(index_));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002253 __ b(ls, index_out_of_range_);
2254
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002255 __ SmiUntag(index_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002256
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002257 StringCharLoadGenerator::Generate(masm,
2258 object_,
2259 index_,
2260 result_,
2261 &call_runtime_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002262
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002263 __ SmiTag(result_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002264 __ bind(&exit_);
2265}
2266
2267
2268void StringCharCodeAtGenerator::GenerateSlow(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002269 MacroAssembler* masm, EmbedMode embed_mode,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002270 const RuntimeCallHelper& call_helper) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002271 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002272
2273 // Index is not a smi.
2274 __ bind(&index_not_smi_);
2275 // If index is a heap number, try converting it to an integer.
2276 __ CheckMap(index_,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002277 result_,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002278 Heap::kHeapNumberMapRootIndex,
2279 index_not_number_,
Ben Murdoch257744e2011-11-30 15:57:28 +00002280 DONT_DO_SMI_CHECK);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002281 call_helper.BeforeCall(masm);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002282 if (embed_mode == PART_OF_IC_HANDLER) {
2283 __ Push(LoadWithVectorDescriptor::VectorRegister(),
2284 LoadWithVectorDescriptor::SlotRegister(), object_, index_);
2285 } else {
2286 // index_ is consumed by runtime conversion function.
2287 __ Push(object_, index_);
2288 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002289 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002290 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002291 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002292 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002293 // NumberToSmi discards numbers that are not exact integers.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002294 __ CallRuntime(Runtime::kNumberToSmi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002295 }
2296 // Save the conversion result before the pop instructions below
2297 // have a chance to overwrite it.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002298 __ Move(index_, r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002299 if (embed_mode == PART_OF_IC_HANDLER) {
2300 __ Pop(LoadWithVectorDescriptor::VectorRegister(),
2301 LoadWithVectorDescriptor::SlotRegister(), object_);
2302 } else {
2303 __ pop(object_);
2304 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002305 // Reload the instance type.
2306 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
2307 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
2308 call_helper.AfterCall(masm);
2309 // If index is still not a smi, it must be out of range.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002310 __ JumpIfNotSmi(index_, index_out_of_range_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002311 // Otherwise, return to the fast path.
2312 __ jmp(&got_smi_index_);
2313
2314 // Call runtime. We get here when the receiver is a string and the
2315 // index is a number, but the code for getting the actual character
2316 // is too complex (e.g., when the string needs to be flattened).
2317 __ bind(&call_runtime_);
2318 call_helper.BeforeCall(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002319 __ SmiTag(index_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002320 __ Push(object_, index_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002321 __ CallRuntime(Runtime::kStringCharCodeAtRT);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002322 __ Move(result_, r0);
2323 call_helper.AfterCall(masm);
2324 __ jmp(&exit_);
2325
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002326 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002327}
2328
2329
2330// -------------------------------------------------------------------------
2331// StringCharFromCodeGenerator
2332
2333void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
2334 // Fast case of Heap::LookupSingleCharacterStringFromCode.
2335 STATIC_ASSERT(kSmiTag == 0);
2336 STATIC_ASSERT(kSmiShiftSize == 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002337 DCHECK(base::bits::IsPowerOfTwo32(String::kMaxOneByteCharCodeU + 1));
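  // A single tst checks both that code_ is a smi and that its value is within
  // the one-byte character range.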
2338 __ tst(code_, Operand(kSmiTagMask |
2339 ((~String::kMaxOneByteCharCodeU) << kSmiTagSize)));
Steve Block1e0659c2011-05-24 12:43:12 +01002340 __ b(ne, &slow_case_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002341
2342 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002343 // At this point code register contains smi tagged one-byte char code.
2344 __ add(result_, result_, Operand::PointerOffsetFromSmiKey(code_));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002345 __ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002346 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002347 __ b(eq, &slow_case_);
2348 __ bind(&exit_);
2349}
2350
2351
2352void StringCharFromCodeGenerator::GenerateSlow(
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002353 MacroAssembler* masm,
2354 const RuntimeCallHelper& call_helper) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002355 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002356
2357 __ bind(&slow_case_);
2358 call_helper.BeforeCall(masm);
2359 __ push(code_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002360 __ CallRuntime(Runtime::kStringCharFromCode);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002361 __ Move(result_, r0);
2362 call_helper.AfterCall(masm);
2363 __ jmp(&exit_);
2364
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002365 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002366}
2367
2368
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002369enum CopyCharactersFlags { COPY_ONE_BYTE = 1, DEST_ALWAYS_ALIGNED = 2 };
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002370
2371
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002372void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
2373 Register dest,
2374 Register src,
2375 Register count,
2376 Register scratch,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002377 String::Encoding encoding) {
2378 if (FLAG_debug_code) {
2379 // Check that destination is word aligned.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002380 __ tst(dest, Operand(kPointerAlignmentMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002381 __ Check(eq, kDestinationOfCopyNotAligned);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002382 }
2383
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002384 // Assumes word reads and writes are little endian.
2385 // Nothing to do for zero characters.
2386 Label done;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002387 if (encoding == String::TWO_BYTE_ENCODING) {
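    // Double the character count to get the byte count; the loop below copies
    // bytes.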
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002388 __ add(count, count, Operand(count), SetCC);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002389 }
2390
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002391 Register limit = count; // Read until dest equals this.
2392 __ add(limit, dest, Operand(count));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002393
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002394 Label loop_entry, loop;
2395 // Copy bytes from src to dest until dest hits limit.
2396 __ b(&loop_entry);
2397 __ bind(&loop);
2398 __ ldrb(scratch, MemOperand(src, 1, PostIndex), lt);
2399 __ strb(scratch, MemOperand(dest, 1, PostIndex));
2400 __ bind(&loop_entry);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002401 __ cmp(dest, Operand(limit));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002402 __ b(lt, &loop);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002403
2404 __ bind(&done);
2405}
2406
2407
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002408void SubStringStub::Generate(MacroAssembler* masm) {
2409 Label runtime;
2410
2411 // Stack frame on entry.
2412 // lr: return address
2413 // sp[0]: to
2414 // sp[4]: from
2415 // sp[8]: string
2416
2417 // This stub is called from the native-call %_SubString(...), so
2418 // nothing can be assumed about the arguments. The stub checks that:
2419 // "string" is a sequential string,
2420 // both "from" and "to" are smis, and
2421 // 0 <= from <= to <= string.length.
2422 // If any of these assumptions fail, we call the runtime system.
2423
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002424 const int kToOffset = 0 * kPointerSize;
2425 const int kFromOffset = 1 * kPointerSize;
2426 const int kStringOffset = 2 * kPointerSize;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002427
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002428 __ Ldrd(r2, r3, MemOperand(sp, kToOffset));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002429 STATIC_ASSERT(kFromOffset == kToOffset + 4);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002430 STATIC_ASSERT(kSmiTag == 0);
2431 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002432
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002433 // Arithmetic shift right by one un-smi-tags. In this case we rotate right
2434 // instead because we bail out on non-smi values: ROR and ASR are equivalent
2435 // for smis but they set the flags in a way that's easier to optimize.
2436 __ mov(r2, Operand(r2, ROR, 1), SetCC);
2437 __ mov(r3, Operand(r3, ROR, 1), SetCC, cc);
2438 // If either to or from had the smi tag bit set, then C is set now, and N
2439 // has the same value: we rotated by 1, so the bottom bit is now the top bit.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002440 // We want to bail out to the runtime here if "from" is negative. In that case, the
2441 // next instruction is not executed and we fall through to bailing out to
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002442 // runtime.
2443 // Executed if both r2 and r3 are untagged integers.
2444 __ sub(r2, r2, Operand(r3), SetCC, cc);
2445 // One of the above un-smis or the above SUB could have set N==1.
2446 __ b(mi, &runtime); // Either "from" or "to" is not a smi, or from > to.
Ben Murdoch85b71792012-04-11 18:30:58 +01002447
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002448 // Make sure first argument is a string.
Ben Murdoch589d6972011-11-30 16:04:58 +00002449 __ ldr(r0, MemOperand(sp, kStringOffset));
Ben Murdoch589d6972011-11-30 16:04:58 +00002450 __ JumpIfSmi(r0, &runtime);
2451 Condition is_string = masm->IsObjectStringType(r0, r1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002452 __ b(NegateCondition(is_string), &runtime);
2453
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002454 Label single_char;
2455 __ cmp(r2, Operand(1));
2456 __ b(eq, &single_char);
2457
Ben Murdoch589d6972011-11-30 16:04:58 +00002458 // Short-cut for the case of trivial substring.
2459 Label return_r0;
2460 // r0: original string
2461 // r2: result string length
2462 __ ldr(r4, FieldMemOperand(r0, String::kLengthOffset));
2463 __ cmp(r2, Operand(r4, ASR, 1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002464 // Return original string.
Ben Murdoch589d6972011-11-30 16:04:58 +00002465 __ b(eq, &return_r0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002466 // Longer than original string's length or negative: unsafe arguments.
2467 __ b(hi, &runtime);
2468 // Shorter than original string's length: an actual substring.
Ben Murdoch589d6972011-11-30 16:04:58 +00002469
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002470 // Deal with different string types: update the index if necessary
2471 // and put the underlying string into r5.
2472 // r0: original string
2473 // r1: instance type
2474 // r2: length
2475 // r3: from index (untagged)
2476 Label underlying_unpacked, sliced_string, seq_or_external_string;
2477 // If the string is not indirect, it can only be sequential or external.
2478 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
2479 STATIC_ASSERT(kIsIndirectStringMask != 0);
2480 __ tst(r1, Operand(kIsIndirectStringMask));
2481 __ b(eq, &seq_or_external_string);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002482
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002483 __ tst(r1, Operand(kSlicedNotConsMask));
2484 __ b(ne, &sliced_string);
2485 // Cons string. Check whether it is flat, then fetch first part.
2486 __ ldr(r5, FieldMemOperand(r0, ConsString::kSecondOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002487 __ CompareRoot(r5, Heap::kempty_stringRootIndex);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002488 __ b(ne, &runtime);
2489 __ ldr(r5, FieldMemOperand(r0, ConsString::kFirstOffset));
2490 // Update instance type.
2491 __ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset));
2492 __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
2493 __ jmp(&underlying_unpacked);
Ben Murdoch589d6972011-11-30 16:04:58 +00002494
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002495 __ bind(&sliced_string);
2496 // Sliced string. Fetch parent and correct start index by offset.
2497 __ ldr(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
2498 __ ldr(r4, FieldMemOperand(r0, SlicedString::kOffsetOffset));
2499 __ add(r3, r3, Operand(r4, ASR, 1)); // Add offset to index.
2500 // Update instance type.
2501 __ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset));
2502 __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
2503 __ jmp(&underlying_unpacked);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002504
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002505 __ bind(&seq_or_external_string);
2506 // Sequential or external string. Just move string to the expected register.
2507 __ mov(r5, r0);
2508
2509 __ bind(&underlying_unpacked);
2510
2511 if (FLAG_string_slices) {
2512 Label copy_routine;
2513 // r5: underlying subject string
2514 // r1: instance type of underlying subject string
2515 // r2: length
2516 // r3: adjusted start index (untagged)
2517 __ cmp(r2, Operand(SlicedString::kMinLength));
2518 // Short slice. Copy instead of slicing.
2519 __ b(lt, &copy_routine);
2520 // Allocate new sliced string. At this point we do not reload the instance
2521 // type including the string encoding because we simply rely on the info
2522 // provided by the original string. It does not matter if the original
2523 // string's encoding is wrong because we always have to recheck encoding of
2524 // the newly created string's parent anyway due to externalized strings.
2525 Label two_byte_slice, set_slice_header;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002526 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002527 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
2528 __ tst(r1, Operand(kStringEncodingMask));
2529 __ b(eq, &two_byte_slice);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002530 __ AllocateOneByteSlicedString(r0, r2, r6, r4, &runtime);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002531 __ jmp(&set_slice_header);
2532 __ bind(&two_byte_slice);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002533 __ AllocateTwoByteSlicedString(r0, r2, r6, r4, &runtime);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002534 __ bind(&set_slice_header);
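    // Smi-tag the adjusted start index before storing it as the slice offset.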
2535 __ mov(r3, Operand(r3, LSL, 1));
2536 __ str(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
2537 __ str(r3, FieldMemOperand(r0, SlicedString::kOffsetOffset));
2538 __ jmp(&return_r0);
2539
2540 __ bind(&copy_routine);
2541 }
2542
2543 // r5: underlying subject string
2544 // r1: instance type of underlying subject string
2545 // r2: length
2546 // r3: adjusted start index (untagged)
2547 Label two_byte_sequential, sequential_string, allocate_result;
2548 STATIC_ASSERT(kExternalStringTag != 0);
2549 STATIC_ASSERT(kSeqStringTag == 0);
2550 __ tst(r1, Operand(kExternalStringTag));
2551 __ b(eq, &sequential_string);
2552
2553 // Handle external string.
2554 // Rule out short external strings.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002555 STATIC_ASSERT(kShortExternalStringTag != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002556 __ tst(r1, Operand(kShortExternalStringTag));
2557 __ b(ne, &runtime);
2558 __ ldr(r5, FieldMemOperand(r5, ExternalString::kResourceDataOffset));
2559 // r5 already points to the first character of underlying string.
2560 __ jmp(&allocate_result);
2561
2562 __ bind(&sequential_string);
2563 // Locate first character of underlying subject string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002564 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
2565 __ add(r5, r5, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002566
2567 __ bind(&allocate_result);
2568 // Sequential one-byte string. Allocate the result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002569 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002570 __ tst(r1, Operand(kStringEncodingMask));
2571 __ b(eq, &two_byte_sequential);
2572
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002573 // Allocate and copy the resulting one-byte string.
2574 __ AllocateOneByteString(r0, r2, r4, r6, r1, &runtime);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002575
2576 // Locate first character of substring to copy.
2577 __ add(r5, r5, r3);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002578 // Locate first character of result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002579 __ add(r1, r0, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002580
Ben Murdoch589d6972011-11-30 16:04:58 +00002581 // r0: result string
2582 // r1: first character of result string
2583 // r2: result string length
2584 // r5: first character of substring to copy
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002585 STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
2586 StringHelper::GenerateCopyCharacters(
2587 masm, r1, r5, r2, r3, String::ONE_BYTE_ENCODING);
Ben Murdoch589d6972011-11-30 16:04:58 +00002588 __ jmp(&return_r0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002589
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002590 // Allocate and copy the resulting two-byte string.
2591 __ bind(&two_byte_sequential);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002592 __ AllocateTwoByteString(r0, r2, r4, r6, r1, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002593
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002594 // Locate first character of substring to copy.
Ben Murdoch589d6972011-11-30 16:04:58 +00002595 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002596 __ add(r5, r5, Operand(r3, LSL, 1));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002597 // Locate first character of result.
2598 __ add(r1, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch589d6972011-11-30 16:04:58 +00002599
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002600 // r0: result string.
2601 // r1: first character of result.
2602 // r2: result length.
Ben Murdoch589d6972011-11-30 16:04:58 +00002603 // r5: first character of substring to copy.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002604 STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002605 StringHelper::GenerateCopyCharacters(
2606 masm, r1, r5, r2, r3, String::TWO_BYTE_ENCODING);
Ben Murdoch589d6972011-11-30 16:04:58 +00002607
2608 __ bind(&return_r0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002609 Counters* counters = isolate()->counters();
Steve Block44f0eee2011-05-26 01:26:41 +01002610 __ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002611 __ Drop(3);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002612 __ Ret();
2613
2614 // Just jump to runtime to create the sub string.
2615 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002616 __ TailCallRuntime(Runtime::kSubString);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002617
2618 __ bind(&single_char);
2619 // r0: original string
2620 // r1: instance type
2621 // r2: length
2622 // r3: from index (untagged)
2623 __ SmiTag(r3, r3);
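  // Let StringCharAtGenerator produce the one-character result string in r0.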
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002624 StringCharAtGenerator generator(r0, r3, r2, r0, &runtime, &runtime, &runtime,
2625 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002626 generator.GenerateFast(masm);
2627 __ Drop(3);
2628 __ Ret();
2629 generator.SkipSlow(masm, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002630}
2631
2632
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002633void ToNumberStub::Generate(MacroAssembler* masm) {
2634 // The ToNumber stub takes one argument in r0.
Ben Murdochda12d292016-06-02 14:46:10 +01002635 STATIC_ASSERT(kSmiTag == 0);
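  // Smis are already numbers; return the argument unchanged.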
2636 __ tst(r0, Operand(kSmiTagMask));
2637 __ Ret(eq);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002638
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002639 __ CompareObjectType(r0, r1, r1, HEAP_NUMBER_TYPE);
2640 // r0: receiver
2641 // r1: receiver instance type
2642 __ Ret(eq);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002643
Ben Murdochda12d292016-06-02 14:46:10 +01002644 NonNumberToNumberStub stub(masm->isolate());
2645 __ TailCallStub(&stub);
2646}
2647
2648void NonNumberToNumberStub::Generate(MacroAssembler* masm) {
2649 // The NonNumberToNumber stub takes one argument in r0.
2650 __ AssertNotNumber(r0);
2651
2652 __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE);
2653 // r0: receiver
2654 // r1: receiver instance type
2655 StringToNumberStub stub(masm->isolate());
2656 __ TailCallStub(&stub, lo);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002657
2658 Label not_oddball;
2659 __ cmp(r1, Operand(ODDBALL_TYPE));
2660 __ b(ne, &not_oddball);
2661 __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset));
2662 __ Ret();
2663 __ bind(&not_oddball);
2664
Ben Murdochda12d292016-06-02 14:46:10 +01002665 __ Push(r0); // Push argument.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002666 __ TailCallRuntime(Runtime::kToNumber);
2667}
2668
Ben Murdochda12d292016-06-02 14:46:10 +01002669void StringToNumberStub::Generate(MacroAssembler* masm) {
2670 // The StringToNumber stub takes one argument in r0.
2671 __ AssertString(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002672
Ben Murdochda12d292016-06-02 14:46:10 +01002673 // Check if string has a cached array index.
2674 Label runtime;
2675 __ ldr(r2, FieldMemOperand(r0, String::kHashFieldOffset));
2676 __ tst(r2, Operand(String::kContainsCachedArrayIndexMask));
2677 __ b(ne, &runtime);
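  // Extract the cached array index from the hash field and return it as a smi.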
2678 __ IndexFromHash(r2, r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002679 __ Ret();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002680
Ben Murdochda12d292016-06-02 14:46:10 +01002681 __ bind(&runtime);
2682 __ Push(r0); // Push argument.
2683 __ TailCallRuntime(Runtime::kStringToNumber);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002684}
2685
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002686void ToStringStub::Generate(MacroAssembler* masm) {
2687 // The ToString stub takes one argument in r0.
2688 Label is_number;
2689 __ JumpIfSmi(r0, &is_number);
2690
2691 __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE);
2692 // r0: receiver
2693 // r1: receiver instance type
2694 __ Ret(lo);
2695
2696 Label not_heap_number;
2697 __ cmp(r1, Operand(HEAP_NUMBER_TYPE));
2698 __ b(ne, &not_heap_number);
2699 __ bind(&is_number);
2700 NumberToStringStub stub(isolate());
2701 __ TailCallStub(&stub);
2702 __ bind(&not_heap_number);
2703
2704 Label not_oddball;
2705 __ cmp(r1, Operand(ODDBALL_TYPE));
2706 __ b(ne, &not_oddball);
2707 __ ldr(r0, FieldMemOperand(r0, Oddball::kToStringOffset));
2708 __ Ret();
2709 __ bind(&not_oddball);
2710
2711 __ push(r0); // Push argument.
2712 __ TailCallRuntime(Runtime::kToString);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002713}
2714
2715
Ben Murdoch097c5b22016-05-18 11:27:45 +01002716void ToNameStub::Generate(MacroAssembler* masm) {
2717 // The ToName stub takes one argument in r0.
2718 Label is_number;
2719 __ JumpIfSmi(r0, &is_number);
2720
2721 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
2722 __ CompareObjectType(r0, r1, r1, LAST_NAME_TYPE);
2723 // r0: receiver
2724 // r1: receiver instance type
2725 __ Ret(ls);
2726
2727 Label not_heap_number;
2728 __ cmp(r1, Operand(HEAP_NUMBER_TYPE));
2729 __ b(ne, &not_heap_number);
2730 __ bind(&is_number);
2731 NumberToStringStub stub(isolate());
2732 __ TailCallStub(&stub);
2733 __ bind(&not_heap_number);
2734
2735 Label not_oddball;
2736 __ cmp(r1, Operand(ODDBALL_TYPE));
2737 __ b(ne, &not_oddball);
2738 __ ldr(r0, FieldMemOperand(r0, Oddball::kToStringOffset));
2739 __ Ret();
2740 __ bind(&not_oddball);
2741
2742 __ push(r0); // Push argument.
2743 __ TailCallRuntime(Runtime::kToName);
2744}
2745
2746
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002747void StringHelper::GenerateFlatOneByteStringEquals(
2748 MacroAssembler* masm, Register left, Register right, Register scratch1,
2749 Register scratch2, Register scratch3) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002750 Register length = scratch1;
2751
2752 // Compare lengths.
2753 Label strings_not_equal, check_zero_length;
2754 __ ldr(length, FieldMemOperand(left, String::kLengthOffset));
2755 __ ldr(scratch2, FieldMemOperand(right, String::kLengthOffset));
2756 __ cmp(length, scratch2);
2757 __ b(eq, &check_zero_length);
2758 __ bind(&strings_not_equal);
2759 __ mov(r0, Operand(Smi::FromInt(NOT_EQUAL)));
2760 __ Ret();
2761
2762 // Check if the length is zero.
2763 Label compare_chars;
2764 __ bind(&check_zero_length);
2765 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002766 __ cmp(length, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00002767 __ b(ne, &compare_chars);
2768 __ mov(r0, Operand(Smi::FromInt(EQUAL)));
2769 __ Ret();
2770
2771 // Compare characters.
2772 __ bind(&compare_chars);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002773 GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2, scratch3,
2774 &strings_not_equal);
Ben Murdoch257744e2011-11-30 15:57:28 +00002775
2776 // Characters are equal.
2777 __ mov(r0, Operand(Smi::FromInt(EQUAL)));
2778 __ Ret();
2779}
2780
2781
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002782void StringHelper::GenerateCompareFlatOneByteStrings(
2783 MacroAssembler* masm, Register left, Register right, Register scratch1,
2784 Register scratch2, Register scratch3, Register scratch4) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002785 Label result_not_equal, compare_lengths;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002786 // Find minimum length and length difference.
2787 __ ldr(scratch1, FieldMemOperand(left, String::kLengthOffset));
2788 __ ldr(scratch2, FieldMemOperand(right, String::kLengthOffset));
2789 __ sub(scratch3, scratch1, Operand(scratch2), SetCC);
2790 Register length_delta = scratch3;
2791 __ mov(scratch1, scratch2, LeaveCC, gt);
2792 Register min_length = scratch1;
2793 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002794 __ cmp(min_length, Operand::Zero());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002795 __ b(eq, &compare_lengths);
2796
Ben Murdoch257744e2011-11-30 15:57:28 +00002797 // Compare loop.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002798 GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
2799 scratch4, &result_not_equal);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002800
Ben Murdoch257744e2011-11-30 15:57:28 +00002801 // Compare lengths - strings up to min-length are equal.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002802 __ bind(&compare_lengths);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002803 DCHECK(Smi::FromInt(EQUAL) == static_cast<Smi*>(0));
Ben Murdoch257744e2011-11-30 15:57:28 +00002804 // Use length_delta as result if it's zero.
2805 __ mov(r0, Operand(length_delta), SetCC);
2806 __ bind(&result_not_equal);
2807 // Conditionally update the result based either on length_delta or
2808 // the last comparison performed in the loop above.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002809 __ mov(r0, Operand(Smi::FromInt(GREATER)), LeaveCC, gt);
2810 __ mov(r0, Operand(Smi::FromInt(LESS)), LeaveCC, lt);
2811 __ Ret();
2812}
2813
2814
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002815void StringHelper::GenerateOneByteCharsCompareLoop(
2816 MacroAssembler* masm, Register left, Register right, Register length,
2817 Register scratch1, Register scratch2, Label* chars_not_equal) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002818 // Change index to run from -length to -1 by adding length to string
2819 // start. This means that loop ends when index reaches zero, which
2820 // doesn't need an additional compare.
2821 __ SmiUntag(length);
2822 __ add(scratch1, length,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002823 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch257744e2011-11-30 15:57:28 +00002824 __ add(left, left, Operand(scratch1));
2825 __ add(right, right, Operand(scratch1));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002826 __ rsb(length, length, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00002827 Register index = length; // index = -length;
2828
2829 // Compare loop.
2830 Label loop;
2831 __ bind(&loop);
2832 __ ldrb(scratch1, MemOperand(left, index));
2833 __ ldrb(scratch2, MemOperand(right, index));
2834 __ cmp(scratch1, scratch2);
2835 __ b(ne, chars_not_equal);
2836 __ add(index, index, Operand(1), SetCC);
2837 __ b(ne, &loop);
2838}
2839
2840
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002841void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
2842 // ----------- S t a t e -------------
2843 // -- r1 : left
2844 // -- r0 : right
2845 // -- lr : return address
2846 // -----------------------------------
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002847
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002848 // Load r2 with the allocation site. We stick an undefined dummy value here
2849 // and replace it with the real allocation site later when we instantiate this
2850 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
2851 __ Move(r2, handle(isolate()->heap()->undefined_value()));
Steve Block44f0eee2011-05-26 01:26:41 +01002852
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002853 // Make sure that we actually patched the allocation site.
2854 if (FLAG_debug_code) {
2855 __ tst(r2, Operand(kSmiTagMask));
2856 __ Assert(ne, kExpectedAllocationSite);
2857 __ push(r2);
2858 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
2859 __ LoadRoot(ip, Heap::kAllocationSiteMapRootIndex);
2860 __ cmp(r2, ip);
2861 __ pop(r2);
2862 __ Assert(eq, kExpectedAllocationSite);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002863 }
2864
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002865 // Tail call into the stub that handles binary operations with allocation
2866 // sites.
2867 BinaryOpWithAllocationSiteStub stub(isolate(), state());
2868 __ TailCallStub(&stub);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002869}
2870
2871
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002872void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
2873 DCHECK_EQ(CompareICState::BOOLEAN, state());
2874 Label miss;
2875
2876 __ CheckMap(r1, r2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
2877 __ CheckMap(r0, r3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002878 if (!Token::IsEqualityOp(op())) {
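    // For ordered comparisons, replace each boolean with its cached ToNumber
    // value, which is the smi 0 or 1.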
2879 __ ldr(r1, FieldMemOperand(r1, Oddball::kToNumberOffset));
2880 __ AssertSmi(r1);
2881 __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset));
2882 __ AssertSmi(r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002883 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01002884 __ sub(r0, r1, r0);
2885 __ Ret();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002886
2887 __ bind(&miss);
2888 GenerateMiss(masm);
2889}
2890
2891
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002892void CompareICStub::GenerateSmis(MacroAssembler* masm) {
2893 DCHECK(state() == CompareICState::SMI);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002894 Label miss;
2895 __ orr(r2, r1, r0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002896 __ JumpIfNotSmi(r2, &miss);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002897
2898 if (GetCondition() == eq) {
2899 // For equality we do not care about the sign of the result.
2900 __ sub(r0, r0, r1, SetCC);
2901 } else {
Steve Block1e0659c2011-05-24 12:43:12 +01002902 // Untag before subtracting to avoid handling overflow.
2903 __ SmiUntag(r1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002904 __ sub(r0, r1, Operand::SmiUntag(r0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002905 }
2906 __ Ret();
2907
2908 __ bind(&miss);
2909 GenerateMiss(masm);
2910}
2911
2912
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002913void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
2914 DCHECK(state() == CompareICState::NUMBER);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002915
2916 Label generic_stub;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002917 Label unordered, maybe_undefined1, maybe_undefined2;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002918 Label miss;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002919
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002920 if (left() == CompareICState::SMI) {
2921 __ JumpIfNotSmi(r1, &miss);
2922 }
2923 if (right() == CompareICState::SMI) {
2924 __ JumpIfNotSmi(r0, &miss);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002925 }
2926
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002927 // Inlining the double comparison and falling back to the general compare
2928 // stub if NaN is involved.
2929 // Load left and right operand.
2930 Label done, left, left_smi, right_smi;
2931 __ JumpIfSmi(r0, &right_smi);
2932 __ CheckMap(r0, r2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1,
2933 DONT_DO_SMI_CHECK);
2934 __ sub(r2, r0, Operand(kHeapObjectTag));
2935 __ vldr(d1, r2, HeapNumber::kValueOffset);
2936 __ b(&left);
2937 __ bind(&right_smi);
2938 __ SmiToDouble(d1, r0);
2939
2940 __ bind(&left);
2941 __ JumpIfSmi(r1, &left_smi);
2942 __ CheckMap(r1, r2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2,
2943 DONT_DO_SMI_CHECK);
2944 __ sub(r2, r1, Operand(kHeapObjectTag));
2945 __ vldr(d0, r2, HeapNumber::kValueOffset);
2946 __ b(&done);
2947 __ bind(&left_smi);
2948 __ SmiToDouble(d0, r1);
2949
2950 __ bind(&done);
2951 // Compare operands.
2952 __ VFPCompareAndSetFlags(d0, d1);
2953
2954 // Don't base result on status bits when a NaN is involved.
2955 __ b(vs, &unordered);
2956
2957 // Return a result of -1, 0, or 1, based on status bits.
2958 __ mov(r0, Operand(EQUAL), LeaveCC, eq);
2959 __ mov(r0, Operand(LESS), LeaveCC, lt);
2960 __ mov(r0, Operand(GREATER), LeaveCC, gt);
2961 __ Ret();
2962
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002963 __ bind(&unordered);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002964 __ bind(&generic_stub);
Ben Murdoch097c5b22016-05-18 11:27:45 +01002965 CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002966 CompareICState::GENERIC, CompareICState::GENERIC);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002967 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
2968
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002969 __ bind(&maybe_undefined1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002970 if (Token::IsOrderedRelationalCompareOp(op())) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002971 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
2972 __ b(ne, &miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002973 __ JumpIfSmi(r1, &unordered);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002974 __ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE);
2975 __ b(ne, &maybe_undefined2);
2976 __ jmp(&unordered);
2977 }
2978
2979 __ bind(&maybe_undefined2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002980 if (Token::IsOrderedRelationalCompareOp(op())) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002981 __ CompareRoot(r1, Heap::kUndefinedValueRootIndex);
2982 __ b(eq, &unordered);
2983 }
2984
Ben Murdochb0fe1622011-05-05 13:52:32 +01002985 __ bind(&miss);
2986 GenerateMiss(masm);
2987}
2988
2989
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002990void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
2991 DCHECK(state() == CompareICState::INTERNALIZED_STRING);
Ben Murdoch257744e2011-11-30 15:57:28 +00002992 Label miss;
2993
2994 // Registers containing left and right operands respectively.
2995 Register left = r1;
2996 Register right = r0;
2997 Register tmp1 = r2;
2998 Register tmp2 = r3;
2999
3000 // Check that both operands are heap objects.
3001 __ JumpIfEitherSmi(left, right, &miss);
3002
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003003 // Check that both operands are internalized strings.
Ben Murdoch257744e2011-11-30 15:57:28 +00003004 __ ldr(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
3005 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3006 __ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
3007 __ ldrb(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003008 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
3009 __ orr(tmp1, tmp1, Operand(tmp2));
3010 __ tst(tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask));
3011 __ b(ne, &miss);
Ben Murdoch257744e2011-11-30 15:57:28 +00003012
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003013 // Internalized strings are compared by identity.
Ben Murdoch257744e2011-11-30 15:57:28 +00003014 __ cmp(left, right);
3015 // Make sure r0 is non-zero. At this point input operands are
3016 // guaranteed to be non-zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003017 DCHECK(right.is(r0));
Ben Murdoch257744e2011-11-30 15:57:28 +00003018 STATIC_ASSERT(EQUAL == 0);
3019 STATIC_ASSERT(kSmiTag == 0);
3020 __ mov(r0, Operand(Smi::FromInt(EQUAL)), LeaveCC, eq);
3021 __ Ret();
3022
3023 __ bind(&miss);
3024 GenerateMiss(masm);
3025}
3026
3027
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003028void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
3029 DCHECK(state() == CompareICState::UNIQUE_NAME);
3030 DCHECK(GetCondition() == eq);
Ben Murdoch257744e2011-11-30 15:57:28 +00003031 Label miss;
3032
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003033 // Registers containing left and right operands respectively.
3034 Register left = r1;
3035 Register right = r0;
3036 Register tmp1 = r2;
3037 Register tmp2 = r3;
3038
3039 // Check that both operands are heap objects.
3040 __ JumpIfEitherSmi(left, right, &miss);
3041
3042 // Check that both operands are unique names. This leaves the instance
3043 // types loaded in tmp1 and tmp2.
3044 __ ldr(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
3045 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3046 __ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
3047 __ ldrb(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
3048
3049 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss);
3050 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss);
3051
3052 // Unique names are compared by identity.
3053 __ cmp(left, right);
3054 // Make sure r0 is non-zero. At this point input operands are
3055 // guaranteed to be non-zero.
3056 DCHECK(right.is(r0));
3057 STATIC_ASSERT(EQUAL == 0);
3058 STATIC_ASSERT(kSmiTag == 0);
3059 __ mov(r0, Operand(Smi::FromInt(EQUAL)), LeaveCC, eq);
3060 __ Ret();
3061
3062 __ bind(&miss);
3063 GenerateMiss(masm);
3064}
3065
3066
3067void CompareICStub::GenerateStrings(MacroAssembler* masm) {
3068 DCHECK(state() == CompareICState::STRING);
3069 Label miss;
3070
3071 bool equality = Token::IsEqualityOp(op());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003072
Ben Murdoch257744e2011-11-30 15:57:28 +00003073 // Registers containing left and right operands respectively.
3074 Register left = r1;
3075 Register right = r0;
3076 Register tmp1 = r2;
3077 Register tmp2 = r3;
3078 Register tmp3 = r4;
3079 Register tmp4 = r5;
3080
3081 // Check that both operands are heap objects.
3082 __ JumpIfEitherSmi(left, right, &miss);
3083
3084 // Check that both operands are strings. This leaves the instance
3085 // types loaded in tmp1 and tmp2.
3086 __ ldr(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
3087 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3088 __ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
3089 __ ldrb(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
3090 STATIC_ASSERT(kNotStringTag != 0);
3091 __ orr(tmp3, tmp1, tmp2);
3092 __ tst(tmp3, Operand(kIsNotStringMask));
3093 __ b(ne, &miss);
3094
3095 // Fast check for identical strings.
3096 __ cmp(left, right);
3097 STATIC_ASSERT(EQUAL == 0);
3098 STATIC_ASSERT(kSmiTag == 0);
3099 __ mov(r0, Operand(Smi::FromInt(EQUAL)), LeaveCC, eq);
3100 __ Ret(eq);
3101
3102 // Handle not identical strings.
3103
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003104 // Check that both strings are internalized strings. If they are, we're done
3105 // because we already know they are not identical. We know they are both
3106 // strings.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003107 if (equality) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003108 DCHECK(GetCondition() == eq);
3109 STATIC_ASSERT(kInternalizedTag == 0);
3110 __ orr(tmp3, tmp1, Operand(tmp2));
3111 __ tst(tmp3, Operand(kIsNotInternalizedMask));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003112 // Make sure r0 is non-zero. At this point input operands are
3113 // guaranteed to be non-zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003114 DCHECK(right.is(r0));
3115 __ Ret(eq);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003116 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003117
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003118 // Check that both strings are sequential one-byte.
Ben Murdoch257744e2011-11-30 15:57:28 +00003119 Label runtime;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003120 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(tmp1, tmp2, tmp3, tmp4,
3121 &runtime);
Ben Murdoch257744e2011-11-30 15:57:28 +00003122
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003123 // Compare flat one-byte strings. Returns when done.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003124 if (equality) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003125 StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1, tmp2,
3126 tmp3);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003127 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003128 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
3129 tmp2, tmp3, tmp4);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003130 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003131
3132 // Handle more complex cases in runtime.
3133 __ bind(&runtime);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003134 if (equality) {
Ben Murdochda12d292016-06-02 14:46:10 +01003135 {
3136 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
3137 __ Push(left, right);
3138 __ CallRuntime(Runtime::kStringEqual);
3139 }
3140 __ LoadRoot(r1, Heap::kTrueValueRootIndex);
3141 __ sub(r0, r0, r1);
3142 __ Ret();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003143 } else {
Ben Murdochda12d292016-06-02 14:46:10 +01003144 __ Push(left, right);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003145 __ TailCallRuntime(Runtime::kStringCompare);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003146 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003147
3148 __ bind(&miss);
3149 GenerateMiss(masm);
3150}
3151
3152
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003153void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
3154 DCHECK_EQ(CompareICState::RECEIVER, state());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003155 Label miss;
3156 __ and_(r2, r1, Operand(r0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003157 __ JumpIfSmi(r2, &miss);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003158
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003159 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
3160 __ CompareObjectType(r0, r2, r2, FIRST_JS_RECEIVER_TYPE);
3161 __ b(lt, &miss);
3162 __ CompareObjectType(r1, r2, r2, FIRST_JS_RECEIVER_TYPE);
3163 __ b(lt, &miss);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003164
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003165 DCHECK(GetCondition() == eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003166 __ sub(r0, r0, Operand(r1));
3167 __ Ret();
3168
3169 __ bind(&miss);
3170 GenerateMiss(masm);
3171}
3172
3173
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003174void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003175 Label miss;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003176 Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003177 __ and_(r2, r1, Operand(r0));
3178 __ JumpIfSmi(r2, &miss);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003179 __ GetWeakValue(r4, cell);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003180 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
3181 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003182 __ cmp(r2, r4);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003183 __ b(ne, &miss);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003184 __ cmp(r3, r4);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003185 __ b(ne, &miss);
Ben Murdochc7cc0282012-03-05 14:35:55 +00003186
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003187 if (Token::IsEqualityOp(op())) {
3188 __ sub(r0, r0, Operand(r1));
3189 __ Ret();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003190 } else {
3191 if (op() == Token::LT || op() == Token::LTE) {
3192 __ mov(r2, Operand(Smi::FromInt(GREATER)));
3193 } else {
3194 __ mov(r2, Operand(Smi::FromInt(LESS)));
3195 }
3196 __ Push(r1, r0, r2);
3197 __ TailCallRuntime(Runtime::kCompare);
3198 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003199
3200 __ bind(&miss);
3201 GenerateMiss(masm);
3202}
3203
3204
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003205void CompareICStub::GenerateMiss(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003206 {
3207 // Call the runtime system in a fresh internal frame.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003208 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003209 __ Push(r1, r0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003210 __ Push(lr, r1, r0);
3211 __ mov(ip, Operand(Smi::FromInt(op())));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003212 __ push(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003213 __ CallRuntime(Runtime::kCompareIC_Miss);
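    // The miss handler returns the code object of the rewritten (specialized)
    // stub in r0.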
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003214 // Compute the entry point of the rewritten stub.
3215 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
3216 // Restore registers.
3217 __ pop(lr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003218 __ Pop(r1, r0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003219 }
3220
Ben Murdochb0fe1622011-05-05 13:52:32 +01003221 __ Jump(r2);
3222}
3223
3224
Steve Block1e0659c2011-05-24 12:43:12 +01003225void DirectCEntryStub::Generate(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003226 // Place the return address on the stack, making the call
3227 // GC safe. The RegExp backend also relies on this.
3228 __ str(lr, MemOperand(sp, 0));
3229 __ blx(ip); // Call the C++ function.
3230 __ VFPEnsureFPSCRState(r2);
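  // Return to the caller by reloading the saved return address into pc.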
Steve Block1e0659c2011-05-24 12:43:12 +01003231 __ ldr(pc, MemOperand(sp, 0));
3232}
3233
3234
3235void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003236 Register target) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003237 intptr_t code =
3238 reinterpret_cast<intptr_t>(GetCode().location());
3239 __ Move(ip, target);
3240 __ mov(lr, Operand(code, RelocInfo::CODE_TARGET));
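  // blx reads the stub's address from lr before overwriting lr with the
  // return address.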
3241 __ blx(lr); // Call the stub.
Steve Block1e0659c2011-05-24 12:43:12 +01003242}
3243
3244
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003245void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
3246 Label* miss,
3247 Label* done,
3248 Register receiver,
3249 Register properties,
3250 Handle<Name> name,
3251 Register scratch0) {
3252 DCHECK(name->IsUniqueName());
Ben Murdoch257744e2011-11-30 15:57:28 +00003253 // If the names of the slots probed for the hash value (probes 1 to
3254 // kProbes - 1) are not equal to the name and the kProbes-th slot is unused
3255 // (its name is the undefined value), the hash table is guaranteed not to
3256 // contain the property. This holds even if some slots represent deleted
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003257 // properties (their names are the hole value).
Ben Murdoch257744e2011-11-30 15:57:28 +00003258 for (int i = 0; i < kInlinedProbes; i++) {
3259 // scratch0 points to properties hash.
3260 // Compute the masked index: (hash + i + i * i) & mask.
3261 Register index = scratch0;
3262 // Capacity is smi 2^n.
3263 __ ldr(index, FieldMemOperand(properties, kCapacityOffset));
3264 __ sub(index, index, Operand(1));
3265 __ and_(index, index, Operand(
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003266 Smi::FromInt(name->Hash() + NameDictionary::GetProbeOffset(i))));
Ben Murdoch257744e2011-11-30 15:57:28 +00003267
3268 // Scale the index by multiplying by the entry size.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003269 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
Ben Murdoch257744e2011-11-30 15:57:28 +00003270 __ add(index, index, Operand(index, LSL, 1)); // index *= 3.
3271
3272 Register entity_name = scratch0;
3273 // Having undefined at this place means the name is not contained.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003274 STATIC_ASSERT(kSmiTagSize == 1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003275 Register tmp = properties;
3276 __ add(tmp, properties, Operand(index, LSL, 1));
3277 __ ldr(entity_name, FieldMemOperand(tmp, kElementsStartOffset));
3278
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003279 DCHECK(!tmp.is(entity_name));
Ben Murdoch257744e2011-11-30 15:57:28 +00003280 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
3281 __ cmp(entity_name, tmp);
3282 __ b(eq, done);
3283
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003284 // Load the hole ready for use below:
3285 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003286
    // Stop if we found the property.
3288 __ cmp(entity_name, Operand(Handle<Name>(name)));
3289 __ b(eq, miss);
Ben Murdoch257744e2011-11-30 15:57:28 +00003290
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003291 Label good;
3292 __ cmp(entity_name, tmp);
3293 __ b(eq, &good);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003294
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003295 // Check if the entry name is not a unique name.
3296 __ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
3297 __ ldrb(entity_name,
3298 FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
3299 __ JumpIfNotUniqueNameInstanceType(entity_name, miss);
3300 __ bind(&good);
Ben Murdoch257744e2011-11-30 15:57:28 +00003301
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003302 // Restore the properties.
3303 __ ldr(properties,
3304 FieldMemOperand(receiver, JSObject::kPropertiesOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003305 }
3306
3307 const int spill_mask =
3308 (lr.bit() | r6.bit() | r5.bit() | r4.bit() | r3.bit() |
3309 r2.bit() | r1.bit() | r0.bit());
3310
3311 __ stm(db_w, sp, spill_mask);
3312 __ ldr(r0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003313 __ mov(r1, Operand(Handle<Name>(name)));
3314 NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003315 __ CallStub(&stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003316 __ cmp(r0, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00003317 __ ldm(ia_w, sp, spill_mask);
3318
3319 __ b(eq, done);
3320 __ b(ne, miss);
Ben Murdoch257744e2011-11-30 15:57:28 +00003321}
3322
3323
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003324// Probe the name dictionary in the |elements| register. Jump to the
Ben Murdoch257744e2011-11-30 15:57:28 +00003325// |done| label if a property with the given name is found. Jump to
3326// the |miss| label otherwise.
3327// If lookup was successful |scratch2| will be equal to elements + 4 * index.
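// Here |index| denotes the index of the entry's first field within the
// elements array (i.e. entry * NameDictionary::kEntrySize), and 4 is
// kPointerSize on ARM.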
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003328void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
3329 Label* miss,
3330 Label* done,
3331 Register elements,
3332 Register name,
3333 Register scratch1,
3334 Register scratch2) {
3335 DCHECK(!elements.is(scratch1));
3336 DCHECK(!elements.is(scratch2));
3337 DCHECK(!name.is(scratch1));
3338 DCHECK(!name.is(scratch2));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003339
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003340 __ AssertName(name);
Ben Murdoch257744e2011-11-30 15:57:28 +00003341
3342 // Compute the capacity mask.
3343 __ ldr(scratch1, FieldMemOperand(elements, kCapacityOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003344 __ SmiUntag(scratch1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003345 __ sub(scratch1, scratch1, Operand(1));
3346
3347 // Generate an unrolled loop that performs a few probes before
3348 // giving up. Measurements done on Gmail indicate that 2 probes
3349 // cover ~93% of loads from dictionaries.
3350 for (int i = 0; i < kInlinedProbes; i++) {
3351 // Compute the masked index: (hash + i + i * i) & mask.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003352 __ ldr(scratch2, FieldMemOperand(name, Name::kHashFieldOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003353 if (i > 0) {
      // Add the probe offset (i + i * i), left shifted, to avoid right
      // shifting the hash in a separate instruction. The value
      // hash + i + i * i is right shifted by the following 'and' instruction.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003357 DCHECK(NameDictionary::GetProbeOffset(i) <
3358 1 << (32 - Name::kHashFieldOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003359 __ add(scratch2, scratch2, Operand(
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003360 NameDictionary::GetProbeOffset(i) << Name::kHashShift));
Ben Murdoch257744e2011-11-30 15:57:28 +00003361 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003362 __ and_(scratch2, scratch1, Operand(scratch2, LSR, Name::kHashShift));
Ben Murdoch257744e2011-11-30 15:57:28 +00003363
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003364 // Scale the index by multiplying by the entry size.
3365 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
Ben Murdoch257744e2011-11-30 15:57:28 +00003366 // scratch2 = scratch2 * 3.
3367 __ add(scratch2, scratch2, Operand(scratch2, LSL, 1));
3368
3369 // Check if the key is identical to the name.
3370 __ add(scratch2, elements, Operand(scratch2, LSL, 2));
3371 __ ldr(ip, FieldMemOperand(scratch2, kElementsStartOffset));
3372 __ cmp(name, Operand(ip));
3373 __ b(eq, done);
3374 }
3375
3376 const int spill_mask =
3377 (lr.bit() | r6.bit() | r5.bit() | r4.bit() |
3378 r3.bit() | r2.bit() | r1.bit() | r0.bit()) &
3379 ~(scratch1.bit() | scratch2.bit());
3380
3381 __ stm(db_w, sp, spill_mask);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003382 if (name.is(r0)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003383 DCHECK(!elements.is(r1));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003384 __ Move(r1, name);
3385 __ Move(r0, elements);
3386 } else {
3387 __ Move(r0, elements);
3388 __ Move(r1, name);
3389 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003390 NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP);
Ben Murdoch257744e2011-11-30 15:57:28 +00003391 __ CallStub(&stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003392 __ cmp(r0, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00003393 __ mov(scratch2, Operand(r2));
3394 __ ldm(ia_w, sp, spill_mask);
3395
3396 __ b(ne, done);
3397 __ b(eq, miss);
3398}
3399
3400
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003401void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003402 // This stub overrides SometimesSetsUpAFrame() to return false. That means
3403 // we cannot call anything that could cause a GC from this stub.
Ben Murdoch257744e2011-11-30 15:57:28 +00003404 // Registers:
  //  result: aliases dictionary (r0); holds the outcome on return.
  //  dictionary: NameDictionary to probe (r0).
  //  key: the name to look up (r1).
  //  index: will hold the index of the entry if the lookup succeeds;
  //         may alias with result.
  // Returns:
  //  result is zero if the lookup failed, non-zero otherwise.
3412
3413 Register result = r0;
3414 Register dictionary = r0;
3415 Register key = r1;
3416 Register index = r2;
3417 Register mask = r3;
3418 Register hash = r4;
3419 Register undefined = r5;
3420 Register entry_key = r6;
3421
3422 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
3423
3424 __ ldr(mask, FieldMemOperand(dictionary, kCapacityOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003425 __ SmiUntag(mask);
Ben Murdoch257744e2011-11-30 15:57:28 +00003426 __ sub(mask, mask, Operand(1));
3427
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003428 __ ldr(hash, FieldMemOperand(key, Name::kHashFieldOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003429
3430 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
3431
3432 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
3433 // Compute the masked index: (hash + i + i * i) & mask.
3434 // Capacity is smi 2^n.
3435 if (i > 0) {
      // Add the probe offset (i + i * i), left shifted, to avoid right
      // shifting the hash in a separate instruction. The value
      // hash + i + i * i is right shifted by the following 'and' instruction.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003439 DCHECK(NameDictionary::GetProbeOffset(i) <
3440 1 << (32 - Name::kHashFieldOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003441 __ add(index, hash, Operand(
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003442 NameDictionary::GetProbeOffset(i) << Name::kHashShift));
Ben Murdoch257744e2011-11-30 15:57:28 +00003443 } else {
3444 __ mov(index, Operand(hash));
3445 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003446 __ and_(index, mask, Operand(index, LSR, Name::kHashShift));
Ben Murdoch257744e2011-11-30 15:57:28 +00003447
3448 // Scale the index by multiplying by the entry size.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003449 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
Ben Murdoch257744e2011-11-30 15:57:28 +00003450 __ add(index, index, Operand(index, LSL, 1)); // index *= 3.
3451
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003452 STATIC_ASSERT(kSmiTagSize == 1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003453 __ add(index, dictionary, Operand(index, LSL, 2));
3454 __ ldr(entry_key, FieldMemOperand(index, kElementsStartOffset));
3455
3456 // Having undefined at this place means the name is not contained.
3457 __ cmp(entry_key, Operand(undefined));
3458 __ b(eq, &not_in_dictionary);
3459
    // Stop if we found the property.
3461 __ cmp(entry_key, Operand(key));
3462 __ b(eq, &in_dictionary);
3463
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003464 if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
3465 // Check if the entry name is not a unique name.
Ben Murdoch257744e2011-11-30 15:57:28 +00003466 __ ldr(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset));
3467 __ ldrb(entry_key,
3468 FieldMemOperand(entry_key, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003469 __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary);
Ben Murdoch257744e2011-11-30 15:57:28 +00003470 }
3471 }
3472
3473 __ bind(&maybe_in_dictionary);
3474 // If we are doing negative lookup then probing failure should be
3475 // treated as a lookup success. For positive lookup probing failure
3476 // should be treated as lookup failure.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003477 if (mode() == POSITIVE_LOOKUP) {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003478 __ mov(result, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00003479 __ Ret();
3480 }
3481
3482 __ bind(&in_dictionary);
3483 __ mov(result, Operand(1));
3484 __ Ret();
3485
3486 __ bind(&not_in_dictionary);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003487 __ mov(result, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00003488 __ Ret();
3489}
3490
3491
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003492void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
3493 Isolate* isolate) {
3494 StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
3495 stub1.GetCode();
3496 // Hydrogen code stubs need stub2 at snapshot time.
3497 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
3498 stub2.GetCode();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003499}
3500
3501
// Takes the input in 3 registers: address_, value_ and object_.  A pointer to
// the value has just been written into the object; now this stub makes sure
3504// we keep the GC informed. The word in the object where the value has been
3505// written is in the address register.
3506void RecordWriteStub::Generate(MacroAssembler* masm) {
3507 Label skip_to_incremental_noncompacting;
3508 Label skip_to_incremental_compacting;
3509
3510 // The first two instructions are generated with labels so as to get the
3511 // offset fixed up correctly by the bind(Label*) call. We patch it back and
3512 // forth between a compare instructions (a nop in this position) and the
// forth between a compare instruction (a nop in this position) and the
3514 // See RecordWriteStub::Patch for details.
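  // While the stub is in STORE_BUFFER_ONLY mode both instructions are nops
  // (they are patched into nops at the end of Generate(), below). Activating
  // incremental or incremental-compacting marking patches the first or second
  // nop back into a branch to the corresponding handler.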
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003515 {
3516 // Block literal pool emission, as the position of these two instructions
3517 // is assumed by the patching code.
3518 Assembler::BlockConstPoolScope block_const_pool(masm);
3519 __ b(&skip_to_incremental_noncompacting);
3520 __ b(&skip_to_incremental_compacting);
3521 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003522
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003523 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
3524 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003525 MacroAssembler::kReturnAtEnd);
3526 }
3527 __ Ret();
3528
3529 __ bind(&skip_to_incremental_noncompacting);
3530 GenerateIncremental(masm, INCREMENTAL);
3531
3532 __ bind(&skip_to_incremental_compacting);
3533 GenerateIncremental(masm, INCREMENTAL_COMPACTION);
3534
3535 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
3536 // Will be checked in IncrementalMarking::ActivateGeneratedStub.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003537 DCHECK(Assembler::GetBranchOffset(masm->instr_at(0)) < (1 << 12));
3538 DCHECK(Assembler::GetBranchOffset(masm->instr_at(4)) < (1 << 12));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003539 PatchBranchIntoNop(masm, 0);
3540 PatchBranchIntoNop(masm, Assembler::kInstrSize);
3541}
3542
3543
3544void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
3545 regs_.Save(masm);
3546
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003547 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003548 Label dont_need_remembered_set;
3549
3550 __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0));
3551 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value.
3552 regs_.scratch0(),
3553 &dont_need_remembered_set);
3554
Ben Murdoch097c5b22016-05-18 11:27:45 +01003555 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
3556 &dont_need_remembered_set);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003557
3558 // First notify the incremental marker if necessary, then update the
3559 // remembered set.
3560 CheckNeedsToInformIncrementalMarker(
3561 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003562 InformIncrementalMarker(masm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003563 regs_.Restore(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003564 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003565 MacroAssembler::kReturnAtEnd);
3566
3567 __ bind(&dont_need_remembered_set);
3568 }
3569
3570 CheckNeedsToInformIncrementalMarker(
3571 masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003572 InformIncrementalMarker(masm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003573 regs_.Restore(masm);
3574 __ Ret();
3575}
3576
3577
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003578void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
3579 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003580 int argument_count = 3;
3581 __ PrepareCallCFunction(argument_count, regs_.scratch0());
3582 Register address =
3583 r0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003584 DCHECK(!address.is(regs_.object()));
3585 DCHECK(!address.is(r0));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003586 __ Move(address, regs_.address());
3587 __ Move(r0, regs_.object());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003588 __ Move(r1, address);
3589 __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003590
3591 AllowExternalCallThatCantCauseGC scope(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003592 __ CallCFunction(
3593 ExternalReference::incremental_marking_record_write_function(isolate()),
3594 argument_count);
3595 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003596}
3597
3598
3599void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
3600 MacroAssembler* masm,
3601 OnNoNeedToInformIncrementalMarker on_no_need,
3602 Mode mode) {
3603 Label on_black;
3604 Label need_incremental;
3605 Label need_incremental_pop_scratch;
3606
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003607 __ and_(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask));
3608 __ ldr(regs_.scratch1(),
3609 MemOperand(regs_.scratch0(),
3610 MemoryChunk::kWriteBarrierCounterOffset));
3611 __ sub(regs_.scratch1(), regs_.scratch1(), Operand(1), SetCC);
3612 __ str(regs_.scratch1(),
3613 MemOperand(regs_.scratch0(),
3614 MemoryChunk::kWriteBarrierCounterOffset));
3615 __ b(mi, &need_incremental);
3616
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003617 // Let's look at the color of the object: If it is not black we don't have
3618 // to inform the incremental marker.
3619 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
3620
3621 regs_.Restore(masm);
3622 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003623 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003624 MacroAssembler::kReturnAtEnd);
3625 } else {
3626 __ Ret();
3627 }
3628
3629 __ bind(&on_black);
3630
3631 // Get the value from the slot.
3632 __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0));
3633
3634 if (mode == INCREMENTAL_COMPACTION) {
3635 Label ensure_not_white;
3636
3637 __ CheckPageFlag(regs_.scratch0(), // Contains value.
3638 regs_.scratch1(), // Scratch.
3639 MemoryChunk::kEvacuationCandidateMask,
3640 eq,
3641 &ensure_not_white);
3642
3643 __ CheckPageFlag(regs_.object(),
3644 regs_.scratch1(), // Scratch.
3645 MemoryChunk::kSkipEvacuationSlotsRecordingMask,
3646 eq,
3647 &need_incremental);
3648
3649 __ bind(&ensure_not_white);
3650 }
3651
3652 // We need extra registers for this, so we push the object and the address
3653 // register temporarily.
3654 __ Push(regs_.object(), regs_.address());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003655 __ JumpIfWhite(regs_.scratch0(), // The value.
3656 regs_.scratch1(), // Scratch.
3657 regs_.object(), // Scratch.
3658 regs_.address(), // Scratch.
3659 &need_incremental_pop_scratch);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003660 __ Pop(regs_.object(), regs_.address());
3661
3662 regs_.Restore(masm);
3663 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003664 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003665 MacroAssembler::kReturnAtEnd);
3666 } else {
3667 __ Ret();
3668 }
3669
3670 __ bind(&need_incremental_pop_scratch);
3671 __ Pop(regs_.object(), regs_.address());
3672
3673 __ bind(&need_incremental);
3674
3675 // Fall through when we need to inform the incremental marker.
3676}
3677
3678
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003679void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
3680 CEntryStub ces(isolate(), 1, kSaveFPRegs);
3681 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
3682 int parameter_count_offset =
Ben Murdochda12d292016-06-02 14:46:10 +01003683 StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003684 __ ldr(r1, MemOperand(fp, parameter_count_offset));
3685 if (function_mode() == JS_FUNCTION_STUB_MODE) {
3686 __ add(r1, r1, Operand(1));
3687 }
3688 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
3689 __ mov(r1, Operand(r1, LSL, kPointerSizeLog2));
3690 __ add(sp, sp, r1);
3691 __ Ret();
3692}
3693
3694
3695void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003696 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
3697 LoadICStub stub(isolate(), state());
3698 stub.GenerateForTrampoline(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003699}
3700
3701
3702void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003703 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
3704 KeyedLoadICStub stub(isolate(), state());
3705 stub.GenerateForTrampoline(masm);
3706}
3707
3708
3709void CallICTrampolineStub::Generate(MacroAssembler* masm) {
3710 __ EmitLoadTypeFeedbackVector(r2);
3711 CallICStub stub(isolate(), state());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003712 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
3713}
3714
3715
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003716void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
3717
3718
3719void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
3720 GenerateImpl(masm, true);
3721}
3722
3723
3724static void HandleArrayCases(MacroAssembler* masm, Register feedback,
3725 Register receiver_map, Register scratch1,
3726 Register scratch2, bool is_polymorphic,
3727 Label* miss) {
3728 // feedback initially contains the feedback array
3729 Label next_loop, prepare_next;
3730 Label start_polymorphic;
3731
3732 Register cached_map = scratch1;
3733
3734 __ ldr(cached_map,
3735 FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(0)));
3736 __ ldr(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
3737 __ cmp(receiver_map, cached_map);
3738 __ b(ne, &start_polymorphic);
3739 // found, now call handler.
3740 Register handler = feedback;
3741 __ ldr(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1)));
3742 __ add(pc, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
3743
3744
3745 Register length = scratch2;
3746 __ bind(&start_polymorphic);
3747 __ ldr(length, FieldMemOperand(feedback, FixedArray::kLengthOffset));
3748 if (!is_polymorphic) {
3749 // If the IC could be monomorphic we have to make sure we don't go past the
3750 // end of the feedback array.
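    // A length of 2 would mean the array holds only the single
    // (weak cell, handler) pair already rejected above.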
3751 __ cmp(length, Operand(Smi::FromInt(2)));
3752 __ b(eq, miss);
3753 }
3754
3755 Register too_far = length;
3756 Register pointer_reg = feedback;
3757
3758 // +-----+------+------+-----+-----+ ... ----+
3759 // | map | len | wm0 | h0 | wm1 | hN |
3760 // +-----+------+------+-----+-----+ ... ----+
3761 // 0 1 2 len-1
3762 // ^ ^
3763 // | |
3764 // pointer_reg too_far
3765 // aka feedback scratch2
3766 // also need receiver_map
3767 // use cached_map (scratch1) to look in the weak map values.
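  // The loop below walks the (weak cell, handler) pairs two pointers at a
  // time until pointer_reg reaches too_far, the end of the feedback array.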
3768 __ add(too_far, feedback, Operand::PointerOffsetFromSmiKey(length));
3769 __ add(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3770 __ add(pointer_reg, feedback,
3771 Operand(FixedArray::OffsetOfElementAt(2) - kHeapObjectTag));
3772
3773 __ bind(&next_loop);
3774 __ ldr(cached_map, MemOperand(pointer_reg));
3775 __ ldr(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
3776 __ cmp(receiver_map, cached_map);
3777 __ b(ne, &prepare_next);
3778 __ ldr(handler, MemOperand(pointer_reg, kPointerSize));
3779 __ add(pc, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
3780
3781 __ bind(&prepare_next);
3782 __ add(pointer_reg, pointer_reg, Operand(kPointerSize * 2));
3783 __ cmp(pointer_reg, too_far);
3784 __ b(lt, &next_loop);
3785
3786 // We exhausted our array of map handler pairs.
3787 __ jmp(miss);
3788}
3789
3790
3791static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
3792 Register receiver_map, Register feedback,
3793 Register vector, Register slot,
3794 Register scratch, Label* compare_map,
3795 Label* load_smi_map, Label* try_array) {
3796 __ JumpIfSmi(receiver, load_smi_map);
3797 __ ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
3798 __ bind(compare_map);
3799 Register cached_map = scratch;
3800 // Move the weak map into the weak_cell register.
  // Load the map held by the weak cell into cached_map.
3802 __ cmp(cached_map, receiver_map);
3803 __ b(ne, try_array);
3804 Register handler = feedback;
3805 __ add(handler, vector, Operand::PointerOffsetFromSmiKey(slot));
3806 __ ldr(handler,
3807 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize));
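  // The handler lives in the vector slot immediately after the weak cell,
  // hence the extra kPointerSize past the slot's own offset.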
3808 __ add(pc, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
3809}
3810
3811
3812void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
3813 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // r1
3814 Register name = LoadWithVectorDescriptor::NameRegister(); // r2
3815 Register vector = LoadWithVectorDescriptor::VectorRegister(); // r3
3816 Register slot = LoadWithVectorDescriptor::SlotRegister(); // r0
3817 Register feedback = r4;
3818 Register receiver_map = r5;
3819 Register scratch1 = r6;
3820
3821 __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
3822 __ ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
3823
3824 // Try to quickly handle the monomorphic case without knowing for sure
3825 // if we have a weak cell in feedback. We do know it's safe to look
3826 // at WeakCell::kValueOffset.
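  // The feedback slot holds either a WeakCell (monomorphic, handled here), a
  // FixedArray of (map, handler) pairs (polymorphic, handled at &try_array),
  // or the megamorphic sentinel symbol (probed via the stub cache below).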
3827 Label try_array, load_smi_map, compare_map;
3828 Label not_array, miss;
3829 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
3830 scratch1, &compare_map, &load_smi_map, &try_array);
3831
3832 // Is it a fixed array?
3833 __ bind(&try_array);
3834 __ ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
3835 __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex);
3836 __ b(ne, &not_array);
3837 HandleArrayCases(masm, feedback, receiver_map, scratch1, r9, true, &miss);
3838
3839 __ bind(&not_array);
3840 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
3841 __ b(ne, &miss);
3842 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
3843 Code::ComputeHandlerFlags(Code::LOAD_IC));
3844 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags,
3845 receiver, name, feedback,
3846 receiver_map, scratch1, r9);
3847
3848 __ bind(&miss);
3849 LoadIC::GenerateMiss(masm);
3850
3851 __ bind(&load_smi_map);
3852 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
3853 __ jmp(&compare_map);
3854}
3855
3856
3857void KeyedLoadICStub::Generate(MacroAssembler* masm) {
3858 GenerateImpl(masm, false);
3859}
3860
3861
3862void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
3863 GenerateImpl(masm, true);
3864}
3865
3866
3867void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
3868 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // r1
3869 Register key = LoadWithVectorDescriptor::NameRegister(); // r2
3870 Register vector = LoadWithVectorDescriptor::VectorRegister(); // r3
3871 Register slot = LoadWithVectorDescriptor::SlotRegister(); // r0
3872 Register feedback = r4;
3873 Register receiver_map = r5;
3874 Register scratch1 = r6;
3875
3876 __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
3877 __ ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
3878
3879 // Try to quickly handle the monomorphic case without knowing for sure
3880 // if we have a weak cell in feedback. We do know it's safe to look
3881 // at WeakCell::kValueOffset.
3882 Label try_array, load_smi_map, compare_map;
3883 Label not_array, miss;
3884 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
3885 scratch1, &compare_map, &load_smi_map, &try_array);
3886
3887 __ bind(&try_array);
3888 // Is it a fixed array?
3889 __ ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
3890 __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex);
3891 __ b(ne, &not_array);
3892
3893 // We have a polymorphic element handler.
3894 Label polymorphic, try_poly_name;
3895 __ bind(&polymorphic);
3896 HandleArrayCases(masm, feedback, receiver_map, scratch1, r9, true, &miss);
3897
3898 __ bind(&not_array);
3899 // Is it generic?
3900 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
3901 __ b(ne, &try_poly_name);
3902 Handle<Code> megamorphic_stub =
3903 KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
3904 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET);
3905
3906 __ bind(&try_poly_name);
3907 // We might have a name in feedback, and a fixed array in the next slot.
3908 __ cmp(key, feedback);
3909 __ b(ne, &miss);
3910 // If the name comparison succeeded, we know we have a fixed array with
3911 // at least one map/handler pair.
3912 __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
3913 __ ldr(feedback,
3914 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize));
3915 HandleArrayCases(masm, feedback, receiver_map, scratch1, r9, false, &miss);
3916
3917 __ bind(&miss);
3918 KeyedLoadIC::GenerateMiss(masm);
3919
3920 __ bind(&load_smi_map);
3921 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
3922 __ jmp(&compare_map);
3923}
3924
3925
3926void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
3927 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
3928 VectorStoreICStub stub(isolate(), state());
3929 stub.GenerateForTrampoline(masm);
3930}
3931
3932
3933void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
3934 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
3935 VectorKeyedStoreICStub stub(isolate(), state());
3936 stub.GenerateForTrampoline(masm);
3937}
3938
3939
3940void VectorStoreICStub::Generate(MacroAssembler* masm) {
3941 GenerateImpl(masm, false);
3942}
3943
3944
3945void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
3946 GenerateImpl(masm, true);
3947}
3948
3949
3950void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
3951 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // r1
3952 Register key = VectorStoreICDescriptor::NameRegister(); // r2
3953 Register vector = VectorStoreICDescriptor::VectorRegister(); // r3
3954 Register slot = VectorStoreICDescriptor::SlotRegister(); // r4
3955 DCHECK(VectorStoreICDescriptor::ValueRegister().is(r0)); // r0
3956 Register feedback = r5;
3957 Register receiver_map = r6;
3958 Register scratch1 = r9;
3959
3960 __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
3961 __ ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
3962
3963 // Try to quickly handle the monomorphic case without knowing for sure
3964 // if we have a weak cell in feedback. We do know it's safe to look
3965 // at WeakCell::kValueOffset.
3966 Label try_array, load_smi_map, compare_map;
3967 Label not_array, miss;
3968 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
3969 scratch1, &compare_map, &load_smi_map, &try_array);
3970
3971 // Is it a fixed array?
3972 __ bind(&try_array);
3973 __ ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
3974 __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex);
3975 __ b(ne, &not_array);
3976
3977 // We are using register r8, which is used for the embedded constant pool
3978 // when FLAG_enable_embedded_constant_pool is true.
3979 DCHECK(!FLAG_enable_embedded_constant_pool);
3980 Register scratch2 = r8;
3981 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, true,
3982 &miss);
3983
3984 __ bind(&not_array);
3985 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
3986 __ b(ne, &miss);
3987 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
3988 Code::ComputeHandlerFlags(Code::STORE_IC));
3989 masm->isolate()->stub_cache()->GenerateProbe(
3990 masm, Code::STORE_IC, code_flags, receiver, key, feedback, receiver_map,
3991 scratch1, scratch2);
3992
3993 __ bind(&miss);
3994 StoreIC::GenerateMiss(masm);
3995
3996 __ bind(&load_smi_map);
3997 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
3998 __ jmp(&compare_map);
3999}
4000
4001
4002void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
4003 GenerateImpl(masm, false);
4004}
4005
4006
4007void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
4008 GenerateImpl(masm, true);
4009}
4010
4011
4012static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback,
4013 Register receiver_map, Register scratch1,
4014 Register scratch2, Label* miss) {
4015 // feedback initially contains the feedback array
4016 Label next_loop, prepare_next;
4017 Label start_polymorphic;
4018 Label transition_call;
4019
4020 Register cached_map = scratch1;
4021 Register too_far = scratch2;
4022 Register pointer_reg = feedback;
4023 __ ldr(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset));
4024
4025 // +-----+------+------+-----+-----+-----+ ... ----+
4026 // | map | len | wm0 | wt0 | h0 | wm1 | hN |
  //  +-----+------+------+-----+-----+-----+ ... ----+
4028 // 0 1 2 len-1
4029 // ^ ^
4030 // | |
4031 // pointer_reg too_far
4032 // aka feedback scratch2
4033 // also need receiver_map
4034 // use cached_map (scratch1) to look in the weak map values.
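  //  Unlike the load case, each entry here is a triple: a weak cell holding
  //  the map, a weak cell holding the transition map (or undefined for a
  //  plain store), and the handler; hence the stride of 3 * kPointerSize.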
4035 __ add(too_far, feedback, Operand::PointerOffsetFromSmiKey(too_far));
4036 __ add(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4037 __ add(pointer_reg, feedback,
4038 Operand(FixedArray::OffsetOfElementAt(0) - kHeapObjectTag));
4039
4040 __ bind(&next_loop);
4041 __ ldr(cached_map, MemOperand(pointer_reg));
4042 __ ldr(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
4043 __ cmp(receiver_map, cached_map);
4044 __ b(ne, &prepare_next);
4045 // Is it a transitioning store?
4046 __ ldr(too_far, MemOperand(pointer_reg, kPointerSize));
4047 __ CompareRoot(too_far, Heap::kUndefinedValueRootIndex);
4048 __ b(ne, &transition_call);
4049 __ ldr(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2));
4050 __ add(pc, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag));
4051
4052 __ bind(&transition_call);
4053 __ ldr(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset));
4054 __ JumpIfSmi(too_far, miss);
4055
4056 __ ldr(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));
4057
4058 // Load the map into the correct register.
4059 DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister()));
4060 __ mov(feedback, too_far);
4061
4062 __ add(pc, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag));
4063
4064 __ bind(&prepare_next);
4065 __ add(pointer_reg, pointer_reg, Operand(kPointerSize * 3));
4066 __ cmp(pointer_reg, too_far);
4067 __ b(lt, &next_loop);
4068
4069 // We exhausted our array of map handler pairs.
4070 __ jmp(miss);
4071}
4072
4073
4074void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4075 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // r1
4076 Register key = VectorStoreICDescriptor::NameRegister(); // r2
4077 Register vector = VectorStoreICDescriptor::VectorRegister(); // r3
4078 Register slot = VectorStoreICDescriptor::SlotRegister(); // r4
4079 DCHECK(VectorStoreICDescriptor::ValueRegister().is(r0)); // r0
4080 Register feedback = r5;
4081 Register receiver_map = r6;
4082 Register scratch1 = r9;
4083
4084 __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
4085 __ ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
4086
4087 // Try to quickly handle the monomorphic case without knowing for sure
4088 // if we have a weak cell in feedback. We do know it's safe to look
4089 // at WeakCell::kValueOffset.
4090 Label try_array, load_smi_map, compare_map;
4091 Label not_array, miss;
4092 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
4093 scratch1, &compare_map, &load_smi_map, &try_array);
4094
4095 __ bind(&try_array);
4096 // Is it a fixed array?
4097 __ ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
4098 __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex);
4099 __ b(ne, &not_array);
4100
4101 // We have a polymorphic element handler.
4102 Label polymorphic, try_poly_name;
4103 __ bind(&polymorphic);
4104
4105 // We are using register r8, which is used for the embedded constant pool
4106 // when FLAG_enable_embedded_constant_pool is true.
4107 DCHECK(!FLAG_enable_embedded_constant_pool);
4108 Register scratch2 = r8;
4109
4110 HandlePolymorphicStoreCase(masm, feedback, receiver_map, scratch1, scratch2,
4111 &miss);
4112
4113 __ bind(&not_array);
4114 // Is it generic?
4115 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
4116 __ b(ne, &try_poly_name);
4117 Handle<Code> megamorphic_stub =
4118 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
4119 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET);
4120
4121 __ bind(&try_poly_name);
4122 // We might have a name in feedback, and a fixed array in the next slot.
4123 __ cmp(key, feedback);
4124 __ b(ne, &miss);
4125 // If the name comparison succeeded, we know we have a fixed array with
4126 // at least one map/handler pair.
4127 __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
4128 __ ldr(feedback,
4129 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize));
4130 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, false,
4131 &miss);
4132
4133 __ bind(&miss);
4134 KeyedStoreIC::GenerateMiss(masm);
4135
4136 __ bind(&load_smi_map);
4137 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
4138 __ jmp(&compare_map);
4139}
4140
4141
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004142void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
4143 if (masm->isolate()->function_entry_hook() != NULL) {
4144 ProfileEntryHookStub stub(masm->isolate());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004145 PredictableCodeSizeScope predictable(masm);
4146 predictable.ExpectSize(masm->CallStubSize(&stub) +
4147 2 * Assembler::kInstrSize);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004148 __ push(lr);
4149 __ CallStub(&stub);
4150 __ pop(lr);
4151 }
4152}
4153
4154
4155void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
4156 // The entry hook is a "push lr" instruction, followed by a call.
4157 const int32_t kReturnAddressDistanceFromFunctionStart =
4158 3 * Assembler::kInstrSize;
4159
4160 // This should contain all kCallerSaved registers.
4161 const RegList kSavedRegs =
4162 1 << 0 | // r0
4163 1 << 1 | // r1
4164 1 << 2 | // r2
4165 1 << 3 | // r3
4166 1 << 5 | // r5
4167 1 << 9; // r9
4168 // We also save lr, so the count here is one higher than the mask indicates.
4169 const int32_t kNumSavedRegs = 7;
4170
4171 DCHECK((kCallerSaved & kSavedRegs) == kCallerSaved);
4172
4173 // Save all caller-save registers as this may be called from anywhere.
4174 __ stm(db_w, sp, kSavedRegs | lr.bit());
4175
4176 // Compute the function's address for the first argument.
4177 __ sub(r0, lr, Operand(kReturnAddressDistanceFromFunctionStart));
4178
4179 // The caller's return address is above the saved temporaries.
4180 // Grab that for the second argument to the hook.
4181 __ add(r1, sp, Operand(kNumSavedRegs * kPointerSize));
4182
4183 // Align the stack if necessary.
4184 int frame_alignment = masm->ActivationFrameAlignment();
4185 if (frame_alignment > kPointerSize) {
4186 __ mov(r5, sp);
4187 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
4188 __ and_(sp, sp, Operand(-frame_alignment));
4189 }
4190
4191#if V8_HOST_ARCH_ARM
4192 int32_t entry_hook =
4193 reinterpret_cast<int32_t>(isolate()->function_entry_hook());
4194 __ mov(ip, Operand(entry_hook));
4195#else
4196 // Under the simulator we need to indirect the entry hook through a
4197 // trampoline function at a known address.
4198 // It additionally takes an isolate as a third parameter
4199 __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
4200
4201 ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
4202 __ mov(ip, Operand(ExternalReference(&dispatcher,
4203 ExternalReference::BUILTIN_CALL,
4204 isolate())));
4205#endif
4206 __ Call(ip);
4207
4208 // Restore the stack pointer if needed.
4209 if (frame_alignment > kPointerSize) {
4210 __ mov(sp, r5);
4211 }
4212
4213 // Also pop pc to get Ret(0).
4214 __ ldm(ia_w, sp, kSavedRegs | pc.bit());
4215}
4216
4217
4218template<class T>
4219static void CreateArrayDispatch(MacroAssembler* masm,
4220 AllocationSiteOverrideMode mode) {
4221 if (mode == DISABLE_ALLOCATION_SITES) {
4222 T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
4223 __ TailCallStub(&stub);
4224 } else if (mode == DONT_OVERRIDE) {
4225 int last_index = GetSequenceIndexFromFastElementsKind(
4226 TERMINAL_FAST_ELEMENTS_KIND);
4227 for (int i = 0; i <= last_index; ++i) {
4228 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4229 __ cmp(r3, Operand(kind));
4230 T stub(masm->isolate(), kind);
4231 __ TailCallStub(&stub, eq);
4232 }
4233
4234 // If we reached this point there is a problem.
4235 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4236 } else {
4237 UNREACHABLE();
4238 }
4239}
4240
4241
4242static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
4243 AllocationSiteOverrideMode mode) {
4244 // r2 - allocation site (if mode != DISABLE_ALLOCATION_SITES)
4245 // r3 - kind (if mode != DISABLE_ALLOCATION_SITES)
4246 // r0 - number of arguments
4247 // r1 - constructor?
4248 // sp[0] - last argument
4249 Label normal_sequence;
4250 if (mode == DONT_OVERRIDE) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004251 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
4252 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
4253 STATIC_ASSERT(FAST_ELEMENTS == 2);
4254 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
4255 STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
4256 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004257
4258 // is the low bit set? If so, we are holey and that is good.
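    // Per the STATIC_ASSERTs above, holey kinds have odd values in the fast
    // ElementsKind sequence, so bit 0 distinguishes holey from packed.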
4259 __ tst(r3, Operand(1));
4260 __ b(ne, &normal_sequence);
4261 }
4262
4263 // look at the first argument
4264 __ ldr(r5, MemOperand(sp, 0));
4265 __ cmp(r5, Operand::Zero());
4266 __ b(eq, &normal_sequence);
4267
4268 if (mode == DISABLE_ALLOCATION_SITES) {
4269 ElementsKind initial = GetInitialFastElementsKind();
4270 ElementsKind holey_initial = GetHoleyElementsKind(initial);
4271
4272 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
4273 holey_initial,
4274 DISABLE_ALLOCATION_SITES);
4275 __ TailCallStub(&stub_holey);
4276
4277 __ bind(&normal_sequence);
4278 ArraySingleArgumentConstructorStub stub(masm->isolate(),
4279 initial,
4280 DISABLE_ALLOCATION_SITES);
4281 __ TailCallStub(&stub);
4282 } else if (mode == DONT_OVERRIDE) {
4283 // We are going to create a holey array, but our kind is non-holey.
4284 // Fix kind and retry (only if we have an allocation site in the slot).
4285 __ add(r3, r3, Operand(1));
4286
4287 if (FLAG_debug_code) {
4288 __ ldr(r5, FieldMemOperand(r2, 0));
4289 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
4290 __ Assert(eq, kExpectedAllocationSite);
4291 }
4292
4293 // Save the resulting elements kind in type info. We can't just store r3
4294 // in the AllocationSite::transition_info field because elements kind is
4295 // restricted to a portion of the field...upper bits need to be left alone.
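    // Adding the Smi-tagged packed-to-holey delta below bumps the kind to its
    // holey counterpart while leaving the rest of transition_info untouched,
    // since the kind occupies the low bits of the field (kShift == 0).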
4296 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4297 __ ldr(r4, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset));
4298 __ add(r4, r4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
4299 __ str(r4, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset));
4300
4301 __ bind(&normal_sequence);
4302 int last_index = GetSequenceIndexFromFastElementsKind(
4303 TERMINAL_FAST_ELEMENTS_KIND);
4304 for (int i = 0; i <= last_index; ++i) {
4305 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4306 __ cmp(r3, Operand(kind));
4307 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
4308 __ TailCallStub(&stub, eq);
4309 }
4310
4311 // If we reached this point there is a problem.
4312 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4313 } else {
4314 UNREACHABLE();
4315 }
4316}
4317
4318
4319template<class T>
4320static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4321 int to_index = GetSequenceIndexFromFastElementsKind(
4322 TERMINAL_FAST_ELEMENTS_KIND);
4323 for (int i = 0; i <= to_index; ++i) {
4324 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4325 T stub(isolate, kind);
4326 stub.GetCode();
4327 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
4328 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
4329 stub1.GetCode();
4330 }
4331 }
4332}
4333
4334
4335void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
4336 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
4337 isolate);
4338 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
4339 isolate);
4340 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
4341 isolate);
4342}
4343
4344
4345void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
4346 Isolate* isolate) {
4347 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
4348 for (int i = 0; i < 2; i++) {
4349 // For internal arrays we only need a few things
4350 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
4351 stubh1.GetCode();
4352 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
4353 stubh2.GetCode();
4354 InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
4355 stubh3.GetCode();
4356 }
4357}
4358
4359
4360void ArrayConstructorStub::GenerateDispatchToArrayStub(
4361 MacroAssembler* masm,
4362 AllocationSiteOverrideMode mode) {
4363 if (argument_count() == ANY) {
4364 Label not_zero_case, not_one_case;
4365 __ tst(r0, r0);
4366 __ b(ne, &not_zero_case);
4367 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4368
4369 __ bind(&not_zero_case);
4370 __ cmp(r0, Operand(1));
4371 __ b(gt, &not_one_case);
4372 CreateArrayDispatchOneArgument(masm, mode);
4373
4374 __ bind(&not_one_case);
4375 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4376 } else if (argument_count() == NONE) {
4377 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4378 } else if (argument_count() == ONE) {
4379 CreateArrayDispatchOneArgument(masm, mode);
4380 } else if (argument_count() == MORE_THAN_ONE) {
4381 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4382 } else {
4383 UNREACHABLE();
4384 }
4385}
4386
4387
4388void ArrayConstructorStub::Generate(MacroAssembler* masm) {
4389 // ----------- S t a t e -------------
4390 // -- r0 : argc (only if argument_count() == ANY)
4391 // -- r1 : constructor
4392 // -- r2 : AllocationSite or undefined
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004393 // -- r3 : new target
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004394 // -- sp[0] : return address
4395 // -- sp[4] : last argument
4396 // -----------------------------------
4397
4398 if (FLAG_debug_code) {
4399 // The array construct code is only set for the global and natives
4400 // builtin Array functions which always have maps.
4401
4402 // Initial map for the builtin Array function should be a map.
4403 __ ldr(r4, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    // The smi test below catches both a NULL and a Smi.
4405 __ tst(r4, Operand(kSmiTagMask));
4406 __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
4407 __ CompareObjectType(r4, r4, r5, MAP_TYPE);
4408 __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
4409
4410 // We should either have undefined in r2 or a valid AllocationSite
4411 __ AssertUndefinedOrAllocationSite(r2, r4);
4412 }
4413
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004414 // Enter the context of the Array function.
4415 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
4416
4417 Label subclassing;
4418 __ cmp(r3, r1);
4419 __ b(ne, &subclassing);
4420
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004421 Label no_info;
4422 // Get the elements kind and case on that.
4423 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
4424 __ b(eq, &no_info);
4425
4426 __ ldr(r3, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset));
4427 __ SmiUntag(r3);
4428 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4429 __ and_(r3, r3, Operand(AllocationSite::ElementsKindBits::kMask));
4430 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
4431
4432 __ bind(&no_info);
4433 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004434
4435 __ bind(&subclassing);
4436 switch (argument_count()) {
4437 case ANY:
4438 case MORE_THAN_ONE:
4439 __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
4440 __ add(r0, r0, Operand(3));
4441 break;
4442 case NONE:
4443 __ str(r1, MemOperand(sp, 0 * kPointerSize));
4444 __ mov(r0, Operand(3));
4445 break;
4446 case ONE:
4447 __ str(r1, MemOperand(sp, 1 * kPointerSize));
4448 __ mov(r0, Operand(4));
4449 break;
4450 }
4451 __ Push(r3, r2);
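  // The runtime sees the original arguments plus the constructor (stored into
  // the receiver slot above), the new target and the AllocationSite, which is
  // why the argument count is the original count plus three in each case.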
4452 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004453}
4454
4455
4456void InternalArrayConstructorStub::GenerateCase(
4457 MacroAssembler* masm, ElementsKind kind) {
4458 __ cmp(r0, Operand(1));
4459
4460 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
4461 __ TailCallStub(&stub0, lo);
4462
4463 InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
4464 __ TailCallStub(&stubN, hi);
4465
4466 if (IsFastPackedElementsKind(kind)) {
4467 // We might need to create a holey array
4468 // look at the first argument
4469 __ ldr(r3, MemOperand(sp, 0));
4470 __ cmp(r3, Operand::Zero());
4471
4472 InternalArraySingleArgumentConstructorStub
4473 stub1_holey(isolate(), GetHoleyElementsKind(kind));
4474 __ TailCallStub(&stub1_holey, ne);
4475 }
4476
4477 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
4478 __ TailCallStub(&stub1);
4479}
4480
4481
4482void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
4483 // ----------- S t a t e -------------
4484 // -- r0 : argc
4485 // -- r1 : constructor
4486 // -- sp[0] : return address
4487 // -- sp[4] : last argument
4488 // -----------------------------------
4489
4490 if (FLAG_debug_code) {
4491 // The array construct code is only set for the global and natives
4492 // builtin Array functions which always have maps.
4493
4494 // Initial map for the builtin Array function should be a map.
4495 __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    // The smi test below catches both a NULL and a Smi.
4497 __ tst(r3, Operand(kSmiTagMask));
4498 __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
4499 __ CompareObjectType(r3, r3, r4, MAP_TYPE);
4500 __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
4501 }
4502
4503 // Figure out the right elements kind
4504 __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the map's "bit field 2" into r3. We only need the first byte,
4506 // but the following bit field extraction takes care of that anyway.
4507 __ ldr(r3, FieldMemOperand(r3, Map::kBitField2Offset));
4508 // Retrieve elements_kind from bit field 2.
4509 __ DecodeField<Map::ElementsKindBits>(r3);
4510
4511 if (FLAG_debug_code) {
4512 Label done;
4513 __ cmp(r3, Operand(FAST_ELEMENTS));
4514 __ b(eq, &done);
4515 __ cmp(r3, Operand(FAST_HOLEY_ELEMENTS));
4516 __ Assert(eq,
4517 kInvalidElementsKindForInternalArrayOrInternalPackedArray);
4518 __ bind(&done);
4519 }
4520
4521 Label fast_elements_case;
4522 __ cmp(r3, Operand(FAST_ELEMENTS));
4523 __ b(eq, &fast_elements_case);
4524 GenerateCase(masm, FAST_HOLEY_ELEMENTS);
4525
4526 __ bind(&fast_elements_case);
4527 GenerateCase(masm, FAST_ELEMENTS);
4528}
4529
4530
Ben Murdoch097c5b22016-05-18 11:27:45 +01004531void FastNewObjectStub::Generate(MacroAssembler* masm) {
4532 // ----------- S t a t e -------------
4533 // -- r1 : target
4534 // -- r3 : new target
4535 // -- cp : context
4536 // -- lr : return address
4537 // -----------------------------------
4538 __ AssertFunction(r1);
4539 __ AssertReceiver(r3);
4540
4541 // Verify that the new target is a JSFunction.
4542 Label new_object;
4543 __ CompareObjectType(r3, r2, r2, JS_FUNCTION_TYPE);
4544 __ b(ne, &new_object);
4545
4546 // Load the initial map and verify that it's in fact a map.
4547 __ ldr(r2, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
4548 __ JumpIfSmi(r2, &new_object);
4549 __ CompareObjectType(r2, r0, r0, MAP_TYPE);
4550 __ b(ne, &new_object);
4551
4552 // Fall back to runtime if the target differs from the new target's
4553 // initial map constructor.
4554 __ ldr(r0, FieldMemOperand(r2, Map::kConstructorOrBackPointerOffset));
4555 __ cmp(r0, r1);
4556 __ b(ne, &new_object);
4557
4558 // Allocate the JSObject on the heap.
4559 Label allocate, done_allocate;
4560 __ ldrb(r4, FieldMemOperand(r2, Map::kInstanceSizeOffset));
4561 __ Allocate(r4, r0, r5, r6, &allocate, SIZE_IN_WORDS);
4562 __ bind(&done_allocate);
4563
4564 // Initialize the JSObject fields.
4565 __ str(r2, MemOperand(r0, JSObject::kMapOffset));
4566 __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
4567 __ str(r3, MemOperand(r0, JSObject::kPropertiesOffset));
4568 __ str(r3, MemOperand(r0, JSObject::kElementsOffset));
4569 STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
4570 __ add(r1, r0, Operand(JSObject::kHeaderSize));
4571
4572 // ----------- S t a t e -------------
4573 // -- r0 : result (untagged)
4574 // -- r1 : result fields (untagged)
4575 // -- r5 : result end (untagged)
4576 // -- r2 : initial map
4577 // -- cp : context
4578 // -- lr : return address
4579 // -----------------------------------
4580
4581 // Perform in-object slack tracking if requested.
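  // Slack tracking is active while the construction counter in the map's
  // bit field 3 is non-zero: in that case only the used portion of the object
  // is filled with undefined and the reserved tail gets one-pointer filler
  // maps, until the counter runs out and the instance size is finalized via
  // the runtime.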
4582 Label slack_tracking;
4583 STATIC_ASSERT(Map::kNoSlackTracking == 0);
4584 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
4585 __ ldr(r3, FieldMemOperand(r2, Map::kBitField3Offset));
4586 __ tst(r3, Operand(Map::ConstructionCounter::kMask));
4587 __ b(ne, &slack_tracking);
4588 {
4589 // Initialize all in-object fields with undefined.
4590 __ InitializeFieldsWithFiller(r1, r5, r6);
4591
4592 // Add the object tag to make the JSObject real.
4593 STATIC_ASSERT(kHeapObjectTag == 1);
4594 __ add(r0, r0, Operand(kHeapObjectTag));
4595 __ Ret();
4596 }
4597 __ bind(&slack_tracking);
4598 {
4599 // Decrease generous allocation count.
4600 STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
4601 __ sub(r3, r3, Operand(1 << Map::ConstructionCounter::kShift));
4602 __ str(r3, FieldMemOperand(r2, Map::kBitField3Offset));
4603
4604 // Initialize the in-object fields with undefined.
4605 __ ldrb(r4, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
4606 __ sub(r4, r5, Operand(r4, LSL, kPointerSizeLog2));
4607 __ InitializeFieldsWithFiller(r1, r4, r6);
4608
4609 // Initialize the remaining (reserved) fields with one pointer filler map.
4610 __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
4611 __ InitializeFieldsWithFiller(r1, r5, r6);
4612
4613 // Add the object tag to make the JSObject real.
4614 STATIC_ASSERT(kHeapObjectTag == 1);
4615 __ add(r0, r0, Operand(kHeapObjectTag));
4616
4617 // Check if we can finalize the instance size.
4618 STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
4619 __ tst(r3, Operand(Map::ConstructionCounter::kMask));
4620 __ Ret(ne);
4621
4622 // Finalize the instance size.
4623 {
4624 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
4625 __ Push(r0, r2);
4626 __ CallRuntime(Runtime::kFinalizeInstanceSize);
4627 __ Pop(r0);
4628 }
4629 __ Ret();
4630 }
4631
4632 // Fall back to %AllocateInNewSpace.
4633 __ bind(&allocate);
4634 {
4635 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
4636 STATIC_ASSERT(kSmiTag == 0);
4637 STATIC_ASSERT(kSmiTagSize == 1);
4638 __ mov(r4, Operand(r4, LSL, kPointerSizeLog2 + 1));
4639 __ Push(r2, r4);
4640 __ CallRuntime(Runtime::kAllocateInNewSpace);
4641 __ Pop(r2);
4642 }
4643 STATIC_ASSERT(kHeapObjectTag == 1);
4644 __ sub(r0, r0, Operand(kHeapObjectTag));
4645 __ ldrb(r5, FieldMemOperand(r2, Map::kInstanceSizeOffset));
4646 __ add(r5, r0, Operand(r5, LSL, kPointerSizeLog2));
4647 __ b(&done_allocate);
4648
4649 // Fall back to %NewObject.
4650 __ bind(&new_object);
4651 __ Push(r1, r3);
4652 __ TailCallRuntime(Runtime::kNewObject);
4653}
4654
4655
4656void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
4657 // ----------- S t a t e -------------
4658 // -- r1 : function
4659 // -- cp : context
4660 // -- fp : frame pointer
4661 // -- lr : return address
4662 // -----------------------------------
4663 __ AssertFunction(r1);
4664
4665 // For Ignition we need to skip all possible handler/stub frames until
4666 // we reach the JavaScript frame for the function (similar to what the
4667 // runtime fallback implementation does). So make r2 point to that
4668 // JavaScript frame.
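  // The loop below simply follows the caller frame pointers until it finds
  // the frame whose function slot holds r1.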
4669 {
4670 Label loop, loop_entry;
4671 __ mov(r2, fp);
4672 __ b(&loop_entry);
4673 __ bind(&loop);
4674 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
4675 __ bind(&loop_entry);
Ben Murdochda12d292016-06-02 14:46:10 +01004676 __ ldr(ip, MemOperand(r2, StandardFrameConstants::kFunctionOffset));
Ben Murdoch097c5b22016-05-18 11:27:45 +01004677 __ cmp(ip, r1);
4678 __ b(ne, &loop);
4679 }
4680
4681 // Check if we have rest parameters (only possible if we have an
4682 // arguments adaptor frame below the function frame).
4683 Label no_rest_parameters;
4684 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
4685 __ ldr(ip, MemOperand(r2, CommonFrameConstants::kContextOrFrameTypeOffset));
4686 __ cmp(ip, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4687 __ b(ne, &no_rest_parameters);
4688
4689 // Check if the arguments adaptor frame contains more arguments than
4690 // specified by the function's internal formal parameter count.
4691 Label rest_parameters;
4692 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4693 __ ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
4694 __ ldr(r1,
4695 FieldMemOperand(r1, SharedFunctionInfo::kFormalParameterCountOffset));
4696 __ sub(r0, r0, r1, SetCC);
4697 __ b(gt, &rest_parameters);
4698
4699 // Return an empty rest parameter array.
4700 __ bind(&no_rest_parameters);
4701 {
4702 // ----------- S t a t e -------------
4703 // -- cp : context
4704 // -- lr : return address
4705 // -----------------------------------
4706
4707 // Allocate an empty rest parameter array.
4708 Label allocate, done_allocate;
4709 __ Allocate(JSArray::kSize, r0, r1, r2, &allocate, TAG_OBJECT);
4710 __ bind(&done_allocate);
4711
4712 // Set up the rest parameter array in r0.
4713 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, r1);
4714 __ str(r1, FieldMemOperand(r0, JSArray::kMapOffset));
4715 __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
4716 __ str(r1, FieldMemOperand(r0, JSArray::kPropertiesOffset));
4717 __ str(r1, FieldMemOperand(r0, JSArray::kElementsOffset));
4718 __ mov(r1, Operand(0));
4719 __ str(r1, FieldMemOperand(r0, JSArray::kLengthOffset));
4720 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
4721 __ Ret();
4722
4723 // Fall back to %AllocateInNewSpace.
4724 __ bind(&allocate);
4725 {
4726 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
4727 __ Push(Smi::FromInt(JSArray::kSize));
4728 __ CallRuntime(Runtime::kAllocateInNewSpace);
4729 }
4730 __ jmp(&done_allocate);
4731 }
4732
4733 __ bind(&rest_parameters);
4734 {
4735 // Compute the pointer to the first rest parameter (skipping the receiver).
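  // r0 is a smi, so shifting it left by kPointerSizeLog2 - 1 scales the
  // (tagged) count directly to a byte offset.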
4736 __ add(r2, r2, Operand(r0, LSL, kPointerSizeLog2 - 1));
4737 __ add(r2, r2,
4738 Operand(StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
4739
4740 // ----------- S t a t e -------------
4741 // -- cp : context
4742 // -- r0 : number of rest parameters (tagged)
4743 // -- r2 : pointer to first rest parameters
4744 // -- lr : return address
4745 // -----------------------------------
4746
4747 // Allocate space for the rest parameter array plus the backing store.
4748 Label allocate, done_allocate;
4749 __ mov(r1, Operand(JSArray::kSize + FixedArray::kHeaderSize));
4750 __ add(r1, r1, Operand(r0, LSL, kPointerSizeLog2 - 1));
4751 __ Allocate(r1, r3, r4, r5, &allocate, TAG_OBJECT);
4752 __ bind(&done_allocate);
4753
4754 // Set up the elements array in r3.
4755 __ LoadRoot(r1, Heap::kFixedArrayMapRootIndex);
4756 __ str(r1, FieldMemOperand(r3, FixedArray::kMapOffset));
4757 __ str(r0, FieldMemOperand(r3, FixedArray::kLengthOffset));
4758 __ add(r4, r3, Operand(FixedArray::kHeaderSize));
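  // Copy the rest parameters from the caller's frame into the elements
  // backing store; r2 walks down the stack (NegPostIndex) while r4 walks up
  // the FixedArray.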
4759 {
4760 Label loop, done_loop;
4761 __ add(r1, r4, Operand(r0, LSL, kPointerSizeLog2 - 1));
4762 __ bind(&loop);
4763 __ cmp(r4, r1);
4764 __ b(eq, &done_loop);
4765 __ ldr(ip, MemOperand(r2, 1 * kPointerSize, NegPostIndex));
4766 __ str(ip, FieldMemOperand(r4, 0 * kPointerSize));
4767 __ add(r4, r4, Operand(1 * kPointerSize));
4768 __ b(&loop);
4769 __ bind(&done_loop);
4770 }
4771
4772 // Set up the rest parameter array in r4.
4773 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, r1);
4774 __ str(r1, FieldMemOperand(r4, JSArray::kMapOffset));
4775 __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
4776 __ str(r1, FieldMemOperand(r4, JSArray::kPropertiesOffset));
4777 __ str(r3, FieldMemOperand(r4, JSArray::kElementsOffset));
4778 __ str(r0, FieldMemOperand(r4, JSArray::kLengthOffset));
4779 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
4780 __ mov(r0, r4);
4781 __ Ret();
4782
4783 // Fall back to %AllocateInNewSpace.
4784 __ bind(&allocate);
4785 {
4786 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
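  // Pass the allocation size as a smi and preserve the rest parameter
  // count (r0) and the source pointer (r2) across the runtime call.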
4787 __ SmiTag(r1);
4788 __ Push(r0, r2, r1);
4789 __ CallRuntime(Runtime::kAllocateInNewSpace);
4790 __ mov(r3, r0);
4791 __ Pop(r0, r2);
4792 }
4793 __ jmp(&done_allocate);
4794 }
4795}
4796
4797
4798void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
4799 // ----------- S t a t e -------------
4800 // -- r1 : function
4801 // -- cp : context
4802 // -- fp : frame pointer
4803 // -- lr : return address
4804 // -----------------------------------
4805 __ AssertFunction(r1);
4806
4807 // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
4808 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
4809 __ ldr(r2,
4810 FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
4811 __ add(r3, fp, Operand(r2, LSL, kPointerSizeLog2 - 1));
4812 __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
4813
4814 // r1 : function
4815 // r2 : number of parameters (tagged)
4816 // r3 : parameters pointer
4817 // Registers used over whole function:
4818 // r5 : arguments count (tagged)
4819 // r6 : mapped parameter count (tagged)
4820
4821 // Check if the calling frame is an arguments adaptor frame.
4822 Label adaptor_frame, try_allocate, runtime;
4823 __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4824 __ ldr(r0, MemOperand(r4, CommonFrameConstants::kContextOrFrameTypeOffset));
4825 __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4826 __ b(eq, &adaptor_frame);
4827
4828 // No adaptor, parameter count = argument count.
4829 __ mov(r5, r2);
4830 __ mov(r6, r2);
4831 __ b(&try_allocate);
4832
4833 // We have an adaptor frame. Patch the parameters pointer.
4834 __ bind(&adaptor_frame);
4835 __ ldr(r5, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
4836 __ add(r4, r4, Operand(r5, LSL, 1));
4837 __ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset));
4838
4839 // r5 = argument count (tagged)
4840 // r6 = parameter count (tagged)
4841 // Compute the mapped parameter count = min(r6, r5) in r6.
4842 __ mov(r6, r2);
4843 __ cmp(r6, Operand(r5));
4844 __ mov(r6, Operand(r5), LeaveCC, gt);
4845
4846 __ bind(&try_allocate);
4847
4848 // Compute the sizes of backing store, parameter map, and arguments object.
4849 // 1. Parameter map, has 2 extra words containing context and backing store.
4850 const int kParameterMapHeaderSize =
4851 FixedArray::kHeaderSize + 2 * kPointerSize;
4852 // If there are no mapped parameters, we do not need the parameter_map.
4853 __ cmp(r6, Operand(Smi::FromInt(0)));
4854 __ mov(r9, Operand::Zero(), LeaveCC, eq);
4855 __ mov(r9, Operand(r6, LSL, 1), LeaveCC, ne);
4856 __ add(r9, r9, Operand(kParameterMapHeaderSize), LeaveCC, ne);
4857
4858 // 2. Backing store.
4859 __ add(r9, r9, Operand(r5, LSL, 1));
4860 __ add(r9, r9, Operand(FixedArray::kHeaderSize));
4861
4862 // 3. Arguments object.
4863 __ add(r9, r9, Operand(JSSloppyArgumentsObject::kSize));
4864
4865 // Do the allocation of all three objects in one go.
4866 __ Allocate(r9, r0, r9, r4, &runtime, TAG_OBJECT);
4867
4868 // r0 = address of new object(s) (tagged)
4869 // r2 = parameter count (smi-tagged)
4870 // Get the arguments boilerplate from the current native context into r4.
4871 const int kNormalOffset =
4872 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
4873 const int kAliasedOffset =
4874 Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
4875
4876 __ ldr(r4, NativeContextMemOperand());
4877 __ cmp(r6, Operand::Zero());
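  // With no mapped parameters use the plain sloppy arguments map, otherwise
  // the fast aliased arguments map.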
4878 __ ldr(r4, MemOperand(r4, kNormalOffset), eq);
4879 __ ldr(r4, MemOperand(r4, kAliasedOffset), ne);
4880
4881 // r0 = address of new object (tagged)
4882 // r2 = parameter count (smi-tagged)
4883 // r4 = address of arguments map (tagged)
4884 // r6 = mapped parameter count (tagged)
4885 __ str(r4, FieldMemOperand(r0, JSObject::kMapOffset));
4886 __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
4887 __ str(r9, FieldMemOperand(r0, JSObject::kPropertiesOffset));
4888 __ str(r9, FieldMemOperand(r0, JSObject::kElementsOffset));
4889
4890 // Set up the callee in-object property.
4891 __ AssertNotSmi(r1);
4892 __ str(r1, FieldMemOperand(r0, JSSloppyArgumentsObject::kCalleeOffset));
4893
4894 // Use the length (smi tagged) and set that as an in-object property too.
4895 __ AssertSmi(r5);
4896 __ str(r5, FieldMemOperand(r0, JSSloppyArgumentsObject::kLengthOffset));
4897
4898 // Set up the elements pointer in the allocated arguments object.
4899 // If we allocated a parameter map, r4 will point there, otherwise
4900 // it will point to the backing store.
4901 __ add(r4, r0, Operand(JSSloppyArgumentsObject::kSize));
4902 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
4903
4904 // r0 = address of new object (tagged)
4905 // r2 = parameter count (tagged)
4906 // r4 = address of parameter map or backing store (tagged)
4907 // r6 = mapped parameter count (tagged)
4908 // Initialize parameter map. If there are no mapped arguments, we're done.
4909 Label skip_parameter_map;
4910 __ cmp(r6, Operand(Smi::FromInt(0)));
4911 // Move backing store address to r1, because it is
4912 // expected there when filling in the unmapped arguments.
4913 __ mov(r1, r4, LeaveCC, eq);
4914 __ b(eq, &skip_parameter_map);
4915
4916 __ LoadRoot(r5, Heap::kSloppyArgumentsElementsMapRootIndex);
4917 __ str(r5, FieldMemOperand(r4, FixedArray::kMapOffset));
4918 __ add(r5, r6, Operand(Smi::FromInt(2)));
4919 __ str(r5, FieldMemOperand(r4, FixedArray::kLengthOffset));
4920 __ str(cp, FieldMemOperand(r4, FixedArray::kHeaderSize + 0 * kPointerSize));
4921 __ add(r5, r4, Operand(r6, LSL, 1));
4922 __ add(r5, r5, Operand(kParameterMapHeaderSize));
4923 __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + 1 * kPointerSize));
4924
4925 // Copy the parameter slots and the holes in the arguments.
4926 // We need to fill in mapped_parameter_count slots. They index the context,
4927 // where parameters are stored in reverse order, at
4928 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
4929 // The mapped parameters thus need to get indices
4930 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
4931 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
4932 // We loop from right to left.
4933 Label parameters_loop, parameters_test;
4934 __ mov(r5, r6);
4935 __ add(r9, r2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
4936 __ sub(r9, r9, Operand(r6));
4937 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
4938 __ add(r1, r4, Operand(r5, LSL, 1));
4939 __ add(r1, r1, Operand(kParameterMapHeaderSize));
4940
4941 // r1 = address of backing store (tagged)
4942 // r4 = address of parameter map (tagged), which is also the address of new
4943 // object + JSSloppyArgumentsObject::kSize (tagged)
4944 // r0 = temporary scratch (among others, for address calculation)
4945 // r5 = loop variable (tagged)
4946 // ip = the hole value
4947 __ jmp(&parameters_test);
4948
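  // Each iteration stores the context slot index (r9, a smi) into the
  // parameter map and the hole into the corresponding backing store slot.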
4949 __ bind(&parameters_loop);
4950 __ sub(r5, r5, Operand(Smi::FromInt(1)));
4951 __ mov(r0, Operand(r5, LSL, 1));
4952 __ add(r0, r0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
4953 __ str(r9, MemOperand(r4, r0));
4954 __ sub(r0, r0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
4955 __ str(ip, MemOperand(r1, r0));
4956 __ add(r9, r9, Operand(Smi::FromInt(1)));
4957 __ bind(&parameters_test);
4958 __ cmp(r5, Operand(Smi::FromInt(0)));
4959 __ b(ne, &parameters_loop);
4960
4961 // Restore r0 = new object (tagged) and r5 = argument count (tagged).
4962 __ sub(r0, r4, Operand(JSSloppyArgumentsObject::kSize));
4963 __ ldr(r5, FieldMemOperand(r0, JSSloppyArgumentsObject::kLengthOffset));
4964
4965 __ bind(&skip_parameter_map);
4966 // r0 = address of new object (tagged)
4967 // r1 = address of backing store (tagged)
4968 // r5 = argument count (tagged)
4969 // r6 = mapped parameter count (tagged)
4970 // r9 = scratch
4971 // Copy arguments header and remaining slots (if there are any).
4972 __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex);
4973 __ str(r9, FieldMemOperand(r1, FixedArray::kMapOffset));
4974 __ str(r5, FieldMemOperand(r1, FixedArray::kLengthOffset));
4975
4976 Label arguments_loop, arguments_test;
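  // r6 is a smi, so LSL #1 scales it to a byte offset; this skips the mapped
  // parameters so the loop copies only the unmapped arguments.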
4977 __ sub(r3, r3, Operand(r6, LSL, 1));
4978 __ jmp(&arguments_test);
4979
4980 __ bind(&arguments_loop);
4981 __ sub(r3, r3, Operand(kPointerSize));
4982 __ ldr(r4, MemOperand(r3, 0));
4983 __ add(r9, r1, Operand(r6, LSL, 1));
4984 __ str(r4, FieldMemOperand(r9, FixedArray::kHeaderSize));
4985 __ add(r6, r6, Operand(Smi::FromInt(1)));
4986
4987 __ bind(&arguments_test);
4988 __ cmp(r6, Operand(r5));
4989 __ b(lt, &arguments_loop);
4990
4991 // Return.
4992 __ Ret();
4993
4994 // Do the runtime call to allocate the arguments object.
4995 // r0 = address of new object (tagged)
4996 // r5 = argument count (tagged)
4997 __ bind(&runtime);
4998 __ Push(r1, r3, r5);
4999 __ TailCallRuntime(Runtime::kNewSloppyArguments);
5000}
5001
5002
5003void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
5004 // ----------- S t a t e -------------
5005 // -- r1 : function
5006 // -- cp : context
5007 // -- fp : frame pointer
5008 // -- lr : return address
5009 // -----------------------------------
5010 __ AssertFunction(r1);
5011
5012 // For Ignition we need to skip all possible handler/stub frames until
5013 // we reach the JavaScript frame for the function (similar to what the
5014 // runtime fallback implementation does). So make r2 point to that
5015 // JavaScript frame.
5016 {
5017 Label loop, loop_entry;
5018 __ mov(r2, fp);
5019 __ b(&loop_entry);
5020 __ bind(&loop);
5021 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
5022 __ bind(&loop_entry);
5023 __ ldr(ip, MemOperand(r2, StandardFrameConstants::kFunctionOffset));
5024 __ cmp(ip, r1);
5025 __ b(ne, &loop);
5026 }
5027
5028 // Check if we have an arguments adaptor frame below the function frame.
5029 Label arguments_adaptor, arguments_done;
5030 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
5031 __ ldr(ip, MemOperand(r3, CommonFrameConstants::kContextOrFrameTypeOffset));
5032 __ cmp(ip, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
5033 __ b(eq, &arguments_adaptor);
5034 {
5035 __ ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
5036 __ ldr(r0, FieldMemOperand(
5037 r1, SharedFunctionInfo::kFormalParameterCountOffset));
5038 __ add(r2, r2, Operand(r0, LSL, kPointerSizeLog2 - 1));
5039 __ add(r2, r2,
5040 Operand(StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
5041 }
5042 __ b(&arguments_done);
5043 __ bind(&arguments_adaptor);
5044 {
5045 __ ldr(r0, MemOperand(r3, ArgumentsAdaptorFrameConstants::kLengthOffset));
5046 __ add(r2, r3, Operand(r0, LSL, kPointerSizeLog2 - 1));
5047 __ add(r2, r2,
5048 Operand(StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
5049 }
5050 __ bind(&arguments_done);
5051
5052 // ----------- S t a t e -------------
5053 // -- cp : context
5054 // -- r0 : number of rest parameters (tagged)
5055 // -- r2 : pointer to first rest parameters
5056 // -- lr : return address
5057 // -----------------------------------
5058
5059 // Allocate space for the strict arguments object plus the backing store.
5060 Label allocate, done_allocate;
5061 __ mov(r1, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
5062 __ add(r1, r1, Operand(r0, LSL, kPointerSizeLog2 - 1));
5063 __ Allocate(r1, r3, r4, r5, &allocate, TAG_OBJECT);
5064 __ bind(&done_allocate);
5065
5066 // Set up the elements array in r3.
5067 __ LoadRoot(r1, Heap::kFixedArrayMapRootIndex);
5068 __ str(r1, FieldMemOperand(r3, FixedArray::kMapOffset));
5069 __ str(r0, FieldMemOperand(r3, FixedArray::kLengthOffset));
5070 __ add(r4, r3, Operand(FixedArray::kHeaderSize));
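  // Copy the arguments from the caller's frame into the elements backing
  // store; r2 walks down the stack (NegPostIndex) while r4 walks up the
  // FixedArray.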
5071 {
5072 Label loop, done_loop;
5073 __ add(r1, r4, Operand(r0, LSL, kPointerSizeLog2 - 1));
5074 __ bind(&loop);
5075 __ cmp(r4, r1);
5076 __ b(eq, &done_loop);
5077 __ ldr(ip, MemOperand(r2, 1 * kPointerSize, NegPostIndex));
5078 __ str(ip, FieldMemOperand(r4, 0 * kPointerSize));
5079 __ add(r4, r4, Operand(1 * kPointerSize));
5080 __ b(&loop);
5081 __ bind(&done_loop);
5082 }
5083
5084 // Set up the strict arguments object in r4.
5085 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, r1);
5086 __ str(r1, FieldMemOperand(r4, JSStrictArgumentsObject::kMapOffset));
5087 __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
5088 __ str(r1, FieldMemOperand(r4, JSStrictArgumentsObject::kPropertiesOffset));
5089 __ str(r3, FieldMemOperand(r4, JSStrictArgumentsObject::kElementsOffset));
5090 __ str(r0, FieldMemOperand(r4, JSStrictArgumentsObject::kLengthOffset));
5091 STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
5092 __ mov(r0, r4);
5093 __ Ret();
5094
5095 // Fall back to %AllocateInNewSpace.
5096 __ bind(&allocate);
5097 {
5098 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
5099 __ SmiTag(r1);
5100 __ Push(r0, r2, r1);
5101 __ CallRuntime(Runtime::kAllocateInNewSpace);
5102 __ mov(r3, r0);
5103 __ Pop(r0, r2);
5104 }
5105 __ b(&done_allocate);
5106}
5107
5108
5109void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
5110 Register context = cp;
5111 Register result = r0;
5112 Register slot = r2;
5113
5114 // Go up the context chain to the script context.
5115 for (int i = 0; i < depth(); ++i) {
5116 __ ldr(result, ContextMemOperand(context, Context::PREVIOUS_INDEX));
5117 context = result;
5118 }
5119
5120 // Load the PropertyCell value at the specified slot.
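  // The slot index is an untagged integer, so scale it by kPointerSize to get
  // the byte offset of the slot within the script context.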
5121 __ add(result, context, Operand(slot, LSL, kPointerSizeLog2));
5122 __ ldr(result, ContextMemOperand(result));
5123 __ ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset));
5124
5125 // If the result is not the_hole, return. Otherwise, handle in the runtime.
5126 __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
5127 __ Ret(ne);
5128
5129 // Fallback to runtime.
5130 __ SmiTag(slot);
5131 __ push(slot);
5132 __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
5133}
5134
5135
5136void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
5137 Register value = r0;
5138 Register slot = r2;
5139
5140 Register cell = r1;
5141 Register cell_details = r4;
5142 Register cell_value = r5;
5143 Register cell_value_map = r6;
5144 Register scratch = r9;
5145
5146 Register context = cp;
5147 Register context_temp = cell;
5148
5149 Label fast_heapobject_case, fast_smi_case, slow_case;
5150
5151 if (FLAG_debug_code) {
5152 __ CompareRoot(value, Heap::kTheHoleValueRootIndex);
5153 __ Check(ne, kUnexpectedValue);
5154 }
5155
5156 // Go up the context chain to the script context.
5157 for (int i = 0; i < depth(); i++) {
5158 __ ldr(context_temp, ContextMemOperand(context, Context::PREVIOUS_INDEX));
5159 context = context_temp;
5160 }
5161
5162 // Load the PropertyCell at the specified slot.
5163 __ add(cell, context, Operand(slot, LSL, kPointerSizeLog2));
5164 __ ldr(cell, ContextMemOperand(cell));
5165
5166 // Load PropertyDetails for the cell (actually only the cell_type and kind).
5167 __ ldr(cell_details, FieldMemOperand(cell, PropertyCell::kDetailsOffset));
5168 __ SmiUntag(cell_details);
5169 __ and_(cell_details, cell_details,
5170 Operand(PropertyDetails::PropertyCellTypeField::kMask |
5171 PropertyDetails::KindField::kMask |
5172 PropertyDetails::kAttributesReadOnlyMask));
5173
5174 // Check if PropertyCell holds mutable data.
5175 Label not_mutable_data;
5176 __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
5177 PropertyCellType::kMutable) |
5178 PropertyDetails::KindField::encode(kData)));
5179 __ b(ne, &not_mutable_data);
5180 __ JumpIfSmi(value, &fast_smi_case);
5181
5182 __ bind(&fast_heapobject_case);
5183 __ str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
5184 // RecordWriteField clobbers the value register, so we copy it before the
5185 // call.
5186 __ mov(r4, Operand(value));
5187 __ RecordWriteField(cell, PropertyCell::kValueOffset, r4, scratch,
5188 kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
5189 OMIT_SMI_CHECK);
5190 __ Ret();
5191
5192 __ bind(&not_mutable_data);
5193 // Check if PropertyCell value matches the new value (relevant for Constant,
5194 // ConstantType and Undefined cells).
5195 Label not_same_value;
5196 __ ldr(cell_value, FieldMemOperand(cell, PropertyCell::kValueOffset));
5197 __ cmp(cell_value, value);
5198 __ b(ne, &not_same_value);
5199
5200 // Make sure the PropertyCell is not marked READ_ONLY.
5201 __ tst(cell_details, Operand(PropertyDetails::kAttributesReadOnlyMask));
5202 __ b(ne, &slow_case);
5203
5204 if (FLAG_debug_code) {
5205 Label done;
5206 // This can only be true for Constant, ConstantType and Undefined cells,
5207 // because we never store the_hole via this stub.
5208 __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
5209 PropertyCellType::kConstant) |
5210 PropertyDetails::KindField::encode(kData)));
5211 __ b(eq, &done);
5212 __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
5213 PropertyCellType::kConstantType) |
5214 PropertyDetails::KindField::encode(kData)));
5215 __ b(eq, &done);
5216 __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
5217 PropertyCellType::kUndefined) |
5218 PropertyDetails::KindField::encode(kData)));
5219 __ Check(eq, kUnexpectedValue);
5220 __ bind(&done);
5221 }
5222 __ Ret();
5223 __ bind(&not_same_value);
5224
5225 // Check if PropertyCell contains data with constant type (and is not
5226 // READ_ONLY).
5227 __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
5228 PropertyCellType::kConstantType) |
5229 PropertyDetails::KindField::encode(kData)));
5230 __ b(ne, &slow_case);
5231
5232 // Now either both old and new values must be smis or both must be heap
5233 // objects with the same map.
5234 Label value_is_heap_object;
5235 __ JumpIfNotSmi(value, &value_is_heap_object);
5236 __ JumpIfNotSmi(cell_value, &slow_case);
5237 // Old and new values are smis, no need for a write barrier here.
5238 __ bind(&fast_smi_case);
5239 __ str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
5240 __ Ret();
5241
5242 __ bind(&value_is_heap_object);
5243 __ JumpIfSmi(cell_value, &slow_case);
5244
5245 __ ldr(cell_value_map, FieldMemOperand(cell_value, HeapObject::kMapOffset));
5246 __ ldr(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
5247 __ cmp(cell_value_map, scratch);
5248 __ b(eq, &fast_heapobject_case);
5249
5250 // Fallback to runtime.
5251 __ bind(&slow_case);
5252 __ SmiTag(slot);
5253 __ Push(slot, value);
5254 __ TailCallRuntime(is_strict(language_mode())
5255 ? Runtime::kStoreGlobalViaContext_Strict
5256 : Runtime::kStoreGlobalViaContext_Sloppy);
5257}
5258
5259
5260static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
5261 return ref0.address() - ref1.address();
5262}
5263
5264
5265// Calls an API function. Allocates HandleScope, extracts returned value
5266// from handle and propagates exceptions. Restores context. stack_space
5267// - space to be unwound on exit (includes the call's JS arguments space and
5268// the additional space allocated for the fast call).
5269static void CallApiFunctionAndReturn(MacroAssembler* masm,
5270 Register function_address,
5271 ExternalReference thunk_ref,
5272 int stack_space,
5273 MemOperand* stack_space_operand,
5274 MemOperand return_value_operand,
5275 MemOperand* context_restore_operand) {
5276 Isolate* isolate = masm->isolate();
5277 ExternalReference next_address =
5278 ExternalReference::handle_scope_next_address(isolate);
5279 const int kNextOffset = 0;
5280 const int kLimitOffset = AddressOffset(
5281 ExternalReference::handle_scope_limit_address(isolate), next_address);
5282 const int kLevelOffset = AddressOffset(
5283 ExternalReference::handle_scope_level_address(isolate), next_address);
5284
5285 DCHECK(function_address.is(r1) || function_address.is(r2));
5286
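  // When the profiler is active, call through a thunk (placed in r3) that
  // wraps the real callback so entries and exits can be logged; the callback
  // address itself stays in r1/r2 as an extra parameter. Otherwise call the
  // callback directly.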
5287 Label profiler_disabled;
5288 Label end_profiler_check;
5289 __ mov(r9, Operand(ExternalReference::is_profiling_address(isolate)));
5290 __ ldrb(r9, MemOperand(r9, 0));
5291 __ cmp(r9, Operand(0));
5292 __ b(eq, &profiler_disabled);
5293
5294 // Additional parameter is the address of the actual callback.
5295 __ mov(r3, Operand(thunk_ref));
5296 __ jmp(&end_profiler_check);
5297
5298 __ bind(&profiler_disabled);
5299 __ Move(r3, function_address);
5300 __ bind(&end_profiler_check);
5301
5302 // Allocate HandleScope in callee-save registers.
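  // r4 = next, r5 = limit, r6 = level; these callee-save registers survive
  // the C call and are used afterwards to tear the handle scope down again.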
5303 __ mov(r9, Operand(next_address));
5304 __ ldr(r4, MemOperand(r9, kNextOffset));
5305 __ ldr(r5, MemOperand(r9, kLimitOffset));
5306 __ ldr(r6, MemOperand(r9, kLevelOffset));
5307 __ add(r6, r6, Operand(1));
5308 __ str(r6, MemOperand(r9, kLevelOffset));
5309
5310 if (FLAG_log_timer_events) {
5311 FrameScope frame(masm, StackFrame::MANUAL);
5312 __ PushSafepointRegisters();
5313 __ PrepareCallCFunction(1, r0);
5314 __ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
5315 __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
5316 1);
5317 __ PopSafepointRegisters();
5318 }
5319
5320 // Native call returns to the DirectCEntry stub which redirects to the
5321 // return address pushed on stack (could have moved after GC).
5322 // DirectCEntry stub itself is generated early and never moves.
5323 DirectCEntryStub stub(isolate);
5324 stub.GenerateCall(masm, r3);
5325
5326 if (FLAG_log_timer_events) {
5327 FrameScope frame(masm, StackFrame::MANUAL);
5328 __ PushSafepointRegisters();
5329 __ PrepareCallCFunction(1, r0);
5330 __ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
5331 __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
5332 1);
5333 __ PopSafepointRegisters();
5334 }
5335
5336 Label promote_scheduled_exception;
5337 Label delete_allocated_handles;
5338 Label leave_exit_frame;
5339 Label return_value_loaded;
5340
5341 // load value from ReturnValue
5342 __ ldr(r0, return_value_operand);
5343 __ bind(&return_value_loaded);
5344 // No more valid handles (the result handle was the last one). Restore
5345 // previous handle scope.
5346 __ str(r4, MemOperand(r9, kNextOffset));
5347 if (__ emit_debug_code()) {
5348 __ ldr(r1, MemOperand(r9, kLevelOffset));
5349 __ cmp(r1, r6);
5350 __ Check(eq, kUnexpectedLevelAfterReturnFromApiCall);
5351 }
5352 __ sub(r6, r6, Operand(1));
5353 __ str(r6, MemOperand(r9, kLevelOffset));
5354 __ ldr(ip, MemOperand(r9, kLimitOffset));
5355 __ cmp(r5, ip);
5356 __ b(ne, &delete_allocated_handles);
5357
5358 // Leave the API exit frame.
5359 __ bind(&leave_exit_frame);
5360 bool restore_context = context_restore_operand != NULL;
5361 if (restore_context) {
5362 __ ldr(cp, *context_restore_operand);
5363 }
5364 // LeaveExitFrame expects unwind space to be in a register.
5365 if (stack_space_operand != NULL) {
5366 __ ldr(r4, *stack_space_operand);
5367 } else {
5368 __ mov(r4, Operand(stack_space));
5369 }
5370 __ LeaveExitFrame(false, r4, !restore_context, stack_space_operand != NULL);
5371
5372 // Check if the function scheduled an exception.
5373 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
5374 __ mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate)));
5375 __ ldr(r5, MemOperand(ip));
5376 __ cmp(r4, r5);
5377 __ b(ne, &promote_scheduled_exception);
5378
5379 __ mov(pc, lr);
5380
5381 // Re-throw by promoting a scheduled exception.
5382 __ bind(&promote_scheduled_exception);
5383 __ TailCallRuntime(Runtime::kPromoteScheduledException);
5384
5385 // HandleScope limit has changed. Delete allocated extensions.
5386 __ bind(&delete_allocated_handles);
5387 __ str(r5, MemOperand(r9, kLimitOffset));
5388 __ mov(r4, r0);
5389 __ PrepareCallCFunction(1, r5);
5390 __ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
5391 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate),
5392 1);
5393 __ mov(r0, r4);
5394 __ jmp(&leave_exit_frame);
5395}
5396
5397void CallApiCallbackStub::Generate(MacroAssembler* masm) {
5398 // ----------- S t a t e -------------
5399 // -- r0 : callee
5400 // -- r4 : call_data
5401 // -- r2 : holder
5402 // -- r1 : api_function_address
5403 // -- cp : context
5404 // --
5405 // -- sp[0] : last argument
5406 // -- ...
5407 // -- sp[(argc - 1)* 4] : first argument
5408 // -- sp[argc * 4] : receiver
5409 // -----------------------------------
5410
5411 Register callee = r0;
5412 Register call_data = r4;
5413 Register holder = r2;
5414 Register api_function_address = r1;
5415 Register context = cp;
5416
5417 typedef FunctionCallbackArguments FCA;
5418
5419 STATIC_ASSERT(FCA::kContextSaveIndex == 6);
5420 STATIC_ASSERT(FCA::kCalleeIndex == 5);
5421 STATIC_ASSERT(FCA::kDataIndex == 4);
5422 STATIC_ASSERT(FCA::kReturnValueOffset == 3);
5423 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
5424 STATIC_ASSERT(FCA::kIsolateIndex == 1);
5425 STATIC_ASSERT(FCA::kHolderIndex == 0);
5426 STATIC_ASSERT(FCA::kArgsLength == 7);
5427
5428 // context save
5429 __ push(context);
5430 if (!is_lazy()) {
5431 // load context from callee
5432 __ ldr(context, FieldMemOperand(callee, JSFunction::kContextOffset));
5433 }
5434
5435 // callee
5436 __ push(callee);
5437
5438 // call data
5439 __ push(call_data);
5440
5441 Register scratch = call_data;
5442 if (!call_data_undefined()) {
5443 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
5444 }
5445 // return value
5446 __ push(scratch);
5447 // return value default
5448 __ push(scratch);
5449 // isolate
5450 __ mov(scratch, Operand(ExternalReference::isolate_address(masm->isolate())));
5451 __ push(scratch);
5452 // holder
5453 __ push(holder);
5454
5455 // Prepare arguments.
5456 __ mov(scratch, sp);
5457
5458 // Allocate the FunctionCallbackInfo structure in the arguments' space,
5459 // since it's not controlled by GC.
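  // Four words are reserved for the FunctionCallbackInfo fields written
  // below: implicit_args_, values_, length_ and is_construct_call_.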
5460 const int kApiStackSpace = 4;
5461
5462 FrameScope frame_scope(masm, StackFrame::MANUAL);
5463 __ EnterExitFrame(false, kApiStackSpace);
5464
5465 DCHECK(!api_function_address.is(r0) && !scratch.is(r0));
5466 // r0 = FunctionCallbackInfo&
5467 // The FunctionCallbackInfo struct is located right after the return address.
5468 __ add(r0, sp, Operand(1 * kPointerSize));
5469 // FunctionCallbackInfo::implicit_args_
5470 __ str(scratch, MemOperand(r0, 0 * kPointerSize));
5471 // FunctionCallbackInfo::values_
5472 __ add(ip, scratch, Operand((FCA::kArgsLength - 1 + argc()) * kPointerSize));
5473 __ str(ip, MemOperand(r0, 1 * kPointerSize));
5474 // FunctionCallbackInfo::length_ = argc
5475 __ mov(ip, Operand(argc()));
5476 __ str(ip, MemOperand(r0, 2 * kPointerSize));
5477 // FunctionCallbackInfo::is_construct_call_ = 0
5478 __ mov(ip, Operand::Zero());
5479 __ str(ip, MemOperand(r0, 3 * kPointerSize));
5480
5481 ExternalReference thunk_ref =
5482 ExternalReference::invoke_function_callback(masm->isolate());
5483
5484 AllowExternalCallThatCantCauseGC scope(masm);
5485 MemOperand context_restore_operand(
5486 fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
5487 // Store callbacks return the first JS argument.
5488 int return_value_offset = 0;
5489 if (is_store()) {
5490 return_value_offset = 2 + FCA::kArgsLength;
5491 } else {
5492 return_value_offset = 2 + FCA::kReturnValueOffset;
5493 }
5494 MemOperand return_value_operand(fp, return_value_offset * kPointerSize);
5495 int stack_space = 0;
5496 MemOperand is_construct_call_operand = MemOperand(sp, 4 * kPointerSize);
5497 MemOperand* stack_space_operand = &is_construct_call_operand;
5498 stack_space = argc() + FCA::kArgsLength + 1;
5499 stack_space_operand = NULL;
5500
5501 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, stack_space,
5502 stack_space_operand, return_value_operand,
5503 &context_restore_operand);
5504}
5505
5506
5507void CallApiGetterStub::Generate(MacroAssembler* masm) {
5508 // ----------- S t a t e -------------
5509 // -- sp[0] : name
5510 // -- sp[4 .. (4 + kArgsLength*4)] : v8::PropertyCallbackInfo::args_
5511 // -- ...
5512 // -- r2 : api_function_address
5513 // -----------------------------------
5514
5515 Register api_function_address = ApiGetterDescriptor::function_address();
5516 DCHECK(api_function_address.is(r2));
5517
5518 // v8::PropertyCallbackInfo::args_ array and name handle.
5519 const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
5520
5521 // Load address of v8::PropertyAccessorInfo::args_ array and name handle.
5522 __ mov(r0, sp); // r0 = Handle<Name>
5523 __ add(r1, r0, Operand(1 * kPointerSize)); // r1 = v8::PCI::args_
5524
5525 const int kApiStackSpace = 1;
5526 FrameScope frame_scope(masm, StackFrame::MANUAL);
5527 __ EnterExitFrame(false, kApiStackSpace);
5528
5529 // Create v8::PropertyCallbackInfo object on the stack and initialize
5530 // its args_ field.
5531 __ str(r1, MemOperand(sp, 1 * kPointerSize));
5532 __ add(r1, sp, Operand(1 * kPointerSize)); // r1 = v8::PropertyCallbackInfo&
5533
5534 ExternalReference thunk_ref =
5535 ExternalReference::invoke_accessor_getter_callback(isolate());
5536
5537 // +3 is to skip prolog, return address and name handle.
5538 MemOperand return_value_operand(
5539 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
5540 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
5541 kStackUnwindSpace, NULL, return_value_operand, NULL);
5542}
5543
5544
5545#undef __
5546
5547} // namespace internal
5548} // namespace v8
5549
5550#endif // V8_TARGET_ARCH_ARM