// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

#include "src/arm/code-stubs-arm.h"

namespace v8 {
namespace internal {


static void InitializeArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(r0, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kInternalArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(r0, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


void ArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}


#define __ ACCESS_MASM(masm)


static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
                                          Condition cond, Strength strength);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* lhs_not_nan,
                                    Label* slow,
                                    bool strict);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs);


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           r0.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ Ret();
}


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Label out_of_range, only_low, negate, done;
  Register input_reg = source();
  Register result_reg = destination();
  DCHECK(is_truncating());

  int double_offset = offset();
  // Account for saved regs if input is sp.
  if (input_reg.is(sp)) double_offset += 3 * kPointerSize;

  Register scratch = GetRegisterThatIsNotOneOf(input_reg, result_reg);
  Register scratch_low =
      GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch);
  Register scratch_high =
      GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch, scratch_low);
  LowDwVfpRegister double_scratch = kScratchDoubleReg;

  __ Push(scratch_high, scratch_low, scratch);

  if (!skip_fastpath()) {
    // Load double input.
    __ vldr(double_scratch, MemOperand(input_reg, double_offset));
    __ vmov(scratch_low, scratch_high, double_scratch);

    // Do fast-path convert from double to int.
    __ vcvt_s32_f64(double_scratch.low(), double_scratch);
    __ vmov(result_reg, double_scratch.low());

    // If result is not saturated (0x7fffffff or 0x80000000), we are done.
    __ sub(scratch, result_reg, Operand(1));
    __ cmp(scratch, Operand(0x7ffffffe));
    __ b(lt, &done);
  } else {
    // We've already done MacroAssembler::TryFastTruncatedDoubleToILoad, so we
    // know exponent > 31, so we can skip the vcvt_s32_f64 which will saturate.
    if (double_offset == 0) {
      __ ldm(ia, input_reg, scratch_low.bit() | scratch_high.bit());
    } else {
      __ ldr(scratch_low, MemOperand(input_reg, double_offset));
      __ ldr(scratch_high, MemOperand(input_reg, double_offset + kIntSize));
    }
  }

  __ Ubfx(scratch, scratch_high,
          HeapNumber::kExponentShift, HeapNumber::kExponentBits);
  // Load scratch with exponent - 1. This is faster than loading
  // with exponent because Bias + 1 = 1024 which is an *ARM* immediate value.
  STATIC_ASSERT(HeapNumber::kExponentBias + 1 == 1024);
  __ sub(scratch, scratch, Operand(HeapNumber::kExponentBias + 1));
  // If the exponent is greater than or equal to 84, the 32 least significant
  // bits are 0s (2^84 = 1, 52 significant bits, 32 uncoded bits), so
  // the result is 0.
  // Compare exponent with 84 (compare exponent - 1 with 83).
  __ cmp(scratch, Operand(83));
  __ b(ge, &out_of_range);

  // If we reach this code, 31 <= exponent <= 83.
  // So, we don't have to handle cases where 0 <= exponent <= 20 for
  // which we would need to shift right the high part of the mantissa.
  // Scratch contains exponent - 1.
  // Load scratch with 52 - exponent (load with 51 - (exponent - 1)).
  __ rsb(scratch, scratch, Operand(51), SetCC);
  __ b(ls, &only_low);
  // 21 <= exponent <= 51, shift scratch_low and scratch_high
  // to generate the result.
  __ mov(scratch_low, Operand(scratch_low, LSR, scratch));
  // Scratch contains: 52 - exponent.
  // We need: exponent - 20.
  // So we use: 32 - scratch = 32 - 52 + exponent = exponent - 20.
  __ rsb(scratch, scratch, Operand(32));
  __ Ubfx(result_reg, scratch_high,
          0, HeapNumber::kMantissaBitsInTopWord);
  // Set the implicit 1 before the mantissa part in scratch_high.
  __ orr(result_reg, result_reg,
         Operand(1 << HeapNumber::kMantissaBitsInTopWord));
  __ orr(result_reg, scratch_low, Operand(result_reg, LSL, scratch));
  __ b(&negate);
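  // Illustrative note (not part of the original source): for example, with
  // exponent == 40 there are 52 - 40 == 12 fraction bits below the binary
  // point, so scratch_low is shifted right by 12, and the implicit 1 plus
  // the 20 mantissa bits from scratch_high are shifted left by
  // exponent - 20 == 32 - 12 == 20; the two halves then tile exactly into
  // the truncated 32-bit result.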

  __ bind(&out_of_range);
  __ mov(result_reg, Operand::Zero());
  __ b(&done);

  __ bind(&only_low);
  // 52 <= exponent <= 83, shift only scratch_low.
  // On entry, scratch contains: 52 - exponent.
  __ rsb(scratch, scratch, Operand::Zero());
  __ mov(result_reg, Operand(scratch_low, LSL, scratch));

  __ bind(&negate);
  // If the input was positive, scratch_high ASR 31 equals 0 and
  // scratch_high LSR 31 equals zero.
  // New result = (result eor 0) + 0 = result.
  // If the input was negative, we have to negate the result.
  // Then scratch_high ASR 31 equals 0xffffffff and scratch_high LSR 31
  // equals 1.
  // New result = (result eor 0xffffffff) + 1 = 0 - result.
  __ eor(result_reg, result_reg, Operand(scratch_high, ASR, 31));
  __ add(result_reg, result_reg, Operand(scratch_high, LSR, 31));

  __ bind(&done);

  __ Pop(scratch_high, scratch_low, scratch);
  __ Ret();
}


// Handle the case where the lhs and rhs are the same object.
// Equality is almost reflexive (everything but NaN), so this is a test
// for "identity and not NaN".
static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
                                          Condition cond, Strength strength) {
  Label not_identical;
  Label heap_number, return_equal;
  __ cmp(r0, r1);
  __ b(ne, &not_identical);

  // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
  // so we do the second best thing - test it ourselves.
  // They are both equal and they are not both Smis so both of them are not
  // Smis. If it's not a heap number, then return equal.
  if (cond == lt || cond == gt) {
    // Call runtime on identical JSObjects.
    __ CompareObjectType(r0, r4, r4, FIRST_JS_RECEIVER_TYPE);
    __ b(ge, slow);
    // Call runtime on identical symbols since we need to throw a TypeError.
    __ cmp(r4, Operand(SYMBOL_TYPE));
    __ b(eq, slow);
    // Call runtime on identical SIMD values since we must throw a TypeError.
    __ cmp(r4, Operand(SIMD128_VALUE_TYPE));
    __ b(eq, slow);
    if (is_strong(strength)) {
      // Call the runtime on anything that is converted in the semantics, since
      // we need to throw a TypeError. Smis have already been ruled out.
      __ cmp(r4, Operand(HEAP_NUMBER_TYPE));
      __ b(eq, &return_equal);
      __ tst(r4, Operand(kIsNotStringMask));
      __ b(ne, slow);
    }
  } else {
    __ CompareObjectType(r0, r4, r4, HEAP_NUMBER_TYPE);
    __ b(eq, &heap_number);
    // Comparing JS objects with <=, >= is complicated.
    if (cond != eq) {
      __ cmp(r4, Operand(FIRST_JS_RECEIVER_TYPE));
      __ b(ge, slow);
      // Call runtime on identical symbols since we need to throw a TypeError.
      __ cmp(r4, Operand(SYMBOL_TYPE));
      __ b(eq, slow);
      // Call runtime on identical SIMD values since we must throw a TypeError.
      __ cmp(r4, Operand(SIMD128_VALUE_TYPE));
      __ b(eq, slow);
      if (is_strong(strength)) {
        // Call the runtime on anything that is converted in the semantics,
        // since we need to throw a TypeError. Smis and heap numbers have
        // already been ruled out.
        __ tst(r4, Operand(kIsNotStringMask));
        __ b(ne, slow);
      }
      // Normally here we fall through to return_equal, but undefined is
      // special: (undefined == undefined) == true, but
      // (undefined <= undefined) == false! See ECMAScript 11.8.5.
      if (cond == le || cond == ge) {
        __ cmp(r4, Operand(ODDBALL_TYPE));
        __ b(ne, &return_equal);
        __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
        __ cmp(r0, r2);
        __ b(ne, &return_equal);
        if (cond == le) {
          // undefined <= undefined should fail.
          __ mov(r0, Operand(GREATER));
        } else {
          // undefined >= undefined should fail.
          __ mov(r0, Operand(LESS));
        }
        __ Ret();
      }
    }
  }

  __ bind(&return_equal);
  if (cond == lt) {
    __ mov(r0, Operand(GREATER));  // Things aren't less than themselves.
  } else if (cond == gt) {
    __ mov(r0, Operand(LESS));     // Things aren't greater than themselves.
  } else {
    __ mov(r0, Operand(EQUAL));    // Things are <=, >=, ==, === themselves.
  }
  __ Ret();

  // For less and greater we don't have to check for NaN since the result of
  // x < x is false regardless. For the others here is some code to check
  // for NaN.
  if (cond != lt && cond != gt) {
    __ bind(&heap_number);
    // It is a heap number, so return non-equal if it's NaN and equal if it's
    // not NaN.

    // The representation of NaN values has all exponent bits (52..62) set,
    // and not all mantissa bits (0..51) clear.
    // Read top bits of double representation (second word of value).
    __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
    // Test that exponent bits are all set.
    __ Sbfx(r3, r2, HeapNumber::kExponentShift, HeapNumber::kExponentBits);
    // NaNs have all-one exponents so they sign extend to -1.
    __ cmp(r3, Operand(-1));
    __ b(ne, &return_equal);
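    // Illustrative note (not part of the original source): Sbfx extracts the
    // 11-bit exponent field as a signed value, so an all-ones field (0x7ff,
    // i.e. NaN or Infinity) sign-extends to -1, while e.g. 1.0 (field 0x3ff)
    // yields 0x3ff and takes the branch to return_equal above.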

    // Shift out flag and all exponent bits, retaining only mantissa.
    __ mov(r2, Operand(r2, LSL, HeapNumber::kNonMantissaBitsInTopWord));
    // Or with all low-bits of mantissa.
    __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
    __ orr(r0, r3, Operand(r2), SetCC);
    // For equal we already have the right value in r0: Return zero (equal)
    // if all bits in mantissa are zero (it's an Infinity) and non-zero if
    // not (it's a NaN). For <= and >= we need to load r0 with the failing
    // value if it's a NaN.
    if (cond != eq) {
      // All-zero means Infinity means equal.
      __ Ret(eq);
      if (cond == le) {
        __ mov(r0, Operand(GREATER));  // NaN <= NaN should fail.
      } else {
        __ mov(r0, Operand(LESS));     // NaN >= NaN should fail.
      }
    }
    __ Ret();
  }
  // No fall through here.

  __ bind(&not_identical);
}


// See comment at call site.
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* lhs_not_nan,
                                    Label* slow,
                                    bool strict) {
  DCHECK((lhs.is(r0) && rhs.is(r1)) ||
         (lhs.is(r1) && rhs.is(r0)));

  Label rhs_is_smi;
  __ JumpIfSmi(rhs, &rhs_is_smi);

  // Lhs is a Smi. Check whether the rhs is a heap number.
  __ CompareObjectType(rhs, r4, r4, HEAP_NUMBER_TYPE);
  if (strict) {
    // If rhs is not a number and lhs is a Smi then strict equality cannot
    // succeed. Return non-equal.
    // If rhs is r0 then there is already a non-zero value in it.
    if (!rhs.is(r0)) {
      __ mov(r0, Operand(NOT_EQUAL), LeaveCC, ne);
    }
    __ Ret(ne);
  } else {
    // Smi compared non-strictly with a non-Smi non-heap-number. Call
    // the runtime.
    __ b(ne, slow);
  }

  // Lhs is a smi, rhs is a number.
  // Convert lhs to a double in d7.
  __ SmiToDouble(d7, lhs);
  // Load the double from rhs, tagged HeapNumber r0, to d6.
  __ vldr(d6, rhs, HeapNumber::kValueOffset - kHeapObjectTag);

  // We now have both loaded as doubles but we can skip the lhs nan check
  // since it's a smi.
  __ jmp(lhs_not_nan);

  __ bind(&rhs_is_smi);
  // Rhs is a smi. Check whether the non-smi lhs is a heap number.
  __ CompareObjectType(lhs, r4, r4, HEAP_NUMBER_TYPE);
  if (strict) {
    // If lhs is not a number and rhs is a smi then strict equality cannot
    // succeed. Return non-equal.
    // If lhs is r0 then there is already a non-zero value in it.
    if (!lhs.is(r0)) {
      __ mov(r0, Operand(NOT_EQUAL), LeaveCC, ne);
    }
    __ Ret(ne);
  } else {
    // Smi compared non-strictly with a non-smi non-heap-number. Call
    // the runtime.
    __ b(ne, slow);
  }

  // Rhs is a smi, lhs is a heap number.
  // Load the double from lhs, tagged HeapNumber r1, to d7.
  __ vldr(d7, lhs, HeapNumber::kValueOffset - kHeapObjectTag);
  // Convert rhs to a double in d6.
  __ SmiToDouble(d6, rhs);
  // Fall through to both_loaded_as_doubles.
}


// See comment at call site.
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs) {
  DCHECK((lhs.is(r0) && rhs.is(r1)) ||
         (lhs.is(r1) && rhs.is(r0)));

  // If either operand is a JS object or an oddball value, then they are
  // not equal since their pointers are different.
  // There is no test for undetectability in strict equality.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  Label first_non_object;
  // Get the type of the first operand into r2 and compare it with
  // FIRST_JS_RECEIVER_TYPE.
  __ CompareObjectType(rhs, r2, r2, FIRST_JS_RECEIVER_TYPE);
  __ b(lt, &first_non_object);

  // Return non-zero (r0 is not zero)
  Label return_not_equal;
  __ bind(&return_not_equal);
  __ Ret();

  __ bind(&first_non_object);
  // Check for oddballs: true, false, null, undefined.
  __ cmp(r2, Operand(ODDBALL_TYPE));
  __ b(eq, &return_not_equal);

  __ CompareObjectType(lhs, r3, r3, FIRST_JS_RECEIVER_TYPE);
  __ b(ge, &return_not_equal);

  // Check for oddballs: true, false, null, undefined.
  __ cmp(r3, Operand(ODDBALL_TYPE));
  __ b(eq, &return_not_equal);

  // Now that we have the types we might as well check for
  // internalized-internalized.
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ orr(r2, r2, Operand(r3));
  __ tst(r2, Operand(kIsNotStringMask | kIsNotInternalizedMask));
  __ b(eq, &return_not_equal);
}


// See comment at call site.
static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
                                       Register lhs,
                                       Register rhs,
                                       Label* both_loaded_as_doubles,
                                       Label* not_heap_numbers,
                                       Label* slow) {
  DCHECK((lhs.is(r0) && rhs.is(r1)) ||
         (lhs.is(r1) && rhs.is(r0)));

  __ CompareObjectType(rhs, r3, r2, HEAP_NUMBER_TYPE);
  __ b(ne, not_heap_numbers);
  __ ldr(r2, FieldMemOperand(lhs, HeapObject::kMapOffset));
  __ cmp(r2, r3);
  __ b(ne, slow);  // First was a heap number, second wasn't. Go slow case.

  // Both are heap numbers. Load them up then jump to the code we have
  // for that.
  __ vldr(d6, rhs, HeapNumber::kValueOffset - kHeapObjectTag);
  __ vldr(d7, lhs, HeapNumber::kValueOffset - kHeapObjectTag);
  __ jmp(both_loaded_as_doubles);
}


// Fast negative check for internalized-to-internalized equality.
static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
                                                     Register lhs,
                                                     Register rhs,
                                                     Label* possible_strings,
                                                     Label* not_both_strings) {
  DCHECK((lhs.is(r0) && rhs.is(r1)) ||
         (lhs.is(r1) && rhs.is(r0)));

  // r2 is object type of rhs.
  Label object_test;
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ tst(r2, Operand(kIsNotStringMask));
  __ b(ne, &object_test);
  __ tst(r2, Operand(kIsNotInternalizedMask));
  __ b(ne, possible_strings);
  __ CompareObjectType(lhs, r3, r3, FIRST_NONSTRING_TYPE);
  __ b(ge, not_both_strings);
  __ tst(r3, Operand(kIsNotInternalizedMask));
  __ b(ne, possible_strings);

  // Both are internalized. We already checked they weren't the same pointer
  // so they are not equal.
  __ mov(r0, Operand(NOT_EQUAL));
  __ Ret();

  __ bind(&object_test);
  __ cmp(r2, Operand(FIRST_JS_RECEIVER_TYPE));
  __ b(lt, not_both_strings);
  __ CompareObjectType(lhs, r2, r3, FIRST_JS_RECEIVER_TYPE);
  __ b(lt, not_both_strings);
  // If both objects are undetectable, they are equal. Otherwise, they
  // are not equal, since they are different objects and an object is not
  // equal to undefined.
  __ ldr(r3, FieldMemOperand(rhs, HeapObject::kMapOffset));
  __ ldrb(r2, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ and_(r0, r2, Operand(r3));
  __ and_(r0, r0, Operand(1 << Map::kIsUndetectable));
  __ eor(r0, r0, Operand(1 << Map::kIsUndetectable));
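  // Illustrative note (not part of the original source): after the two ands,
  // r0 has the undetectable bit set only if *both* maps are undetectable; the
  // eor then turns that case into 0 (EQUAL) and every other case into a
  // non-zero value (not equal).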
  __ Ret();
}


static void CompareICStub_CheckInputType(MacroAssembler* masm, Register input,
                                         Register scratch,
                                         CompareICState::State expected,
                                         Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail,
                DONT_DO_SMI_CHECK);
  }
  // We could be strict about internalized/non-internalized here, but as long
  // as hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


// On entry r1 and r0 are the values to be compared.
// On exit r0 is 0, positive or negative to indicate the result of
// the comparison.
void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Register lhs = r1;
  Register rhs = r0;
  Condition cc = GetCondition();

  Label miss;
  CompareICStub_CheckInputType(masm, lhs, r2, left(), &miss);
  CompareICStub_CheckInputType(masm, rhs, r3, right(), &miss);

  Label slow;  // Call builtin.
  Label not_smis, both_loaded_as_doubles, lhs_not_nan;

  Label not_two_smis, smi_done;
  __ orr(r2, r1, r0);
  __ JumpIfNotSmi(r2, &not_two_smis);
  __ mov(r1, Operand(r1, ASR, 1));
  __ sub(r0, r1, Operand(r0, ASR, 1));
  __ Ret();
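  // Illustrative note (not part of the original source): smis are tagged as
  // value << 1, so the two ASR-by-1 operations untag both operands and the
  // subtraction leaves a negative, zero or positive untagged difference in
  // r0, which is exactly the required comparison result.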
  __ bind(&not_two_smis);

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Handle the case where the objects are identical. Either returns the answer
  // or goes to slow. Only falls through if the objects were not identical.
  EmitIdenticalObjectComparison(masm, &slow, cc, strength());

  // If either is a Smi (we know that not both are), then they can only
  // be strictly equal if the other is a HeapNumber.
  STATIC_ASSERT(kSmiTag == 0);
  DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
  __ and_(r2, lhs, Operand(rhs));
  __ JumpIfNotSmi(r2, &not_smis);
  // One operand is a smi. EmitSmiNonsmiComparison generates code that can:
  // 1) Return the answer.
  // 2) Go to slow.
  // 3) Fall through to both_loaded_as_doubles.
  // 4) Jump to lhs_not_nan.
  // In cases 3 and 4 we have found out we were dealing with a number-number
  // comparison. If VFP3 is supported the double values of the numbers have
  // been loaded into d7 and d6. Otherwise, the double values have been loaded
  // into r0, r1, r2, and r3.
  EmitSmiNonsmiComparison(masm, lhs, rhs, &lhs_not_nan, &slow, strict());

  __ bind(&both_loaded_as_doubles);
  // The arguments have been converted to doubles and stored in d6 and d7, if
  // VFP3 is supported, or in r0, r1, r2, and r3.
  __ bind(&lhs_not_nan);
  Label no_nan;
  // ARMv7 VFP3 instructions to implement double precision comparison.
  __ VFPCompareAndSetFlags(d7, d6);
  Label nan;
  __ b(vs, &nan);
  __ mov(r0, Operand(EQUAL), LeaveCC, eq);
  __ mov(r0, Operand(LESS), LeaveCC, lt);
  __ mov(r0, Operand(GREATER), LeaveCC, gt);
  __ Ret();

  __ bind(&nan);
  // If one of the sides was a NaN then the v flag is set. Load r0 with
  // whatever it takes to make the comparison fail, since comparisons with NaN
  // always fail.
  if (cc == lt || cc == le) {
    __ mov(r0, Operand(GREATER));
  } else {
    __ mov(r0, Operand(LESS));
  }
  __ Ret();

  __ bind(&not_smis);
  // At this point we know we are dealing with two different objects,
  // and neither of them is a Smi. The objects are in rhs_ and lhs_.
  if (strict()) {
    // This returns non-equal for some object types, or falls through if it
    // was not lucky.
    EmitStrictTwoHeapObjectCompare(masm, lhs, rhs);
  }

  Label check_for_internalized_strings;
  Label flat_string_check;
  // Check for heap-number-heap-number comparison. Can jump to slow case,
  // or load both doubles into r0, r1, r2, r3 and jump to the code that handles
  // that case. If the inputs are not doubles then jumps to
  // check_for_internalized_strings.
  // In this case r2 will contain the type of rhs_. Never falls through.
  EmitCheckForTwoHeapNumbers(masm,
                             lhs,
                             rhs,
                             &both_loaded_as_doubles,
                             &check_for_internalized_strings,
                             &flat_string_check);

  __ bind(&check_for_internalized_strings);
  // In the strict case the EmitStrictTwoHeapObjectCompare already took care of
  // internalized strings.
  if (cc == eq && !strict()) {
    // Returns an answer for two internalized strings or two detectable
    // objects. Otherwise jumps to string case or not both strings case.
    // Assumes that r2 is the type of rhs_ on entry.
    EmitCheckForInternalizedStringsOrObjects(
        masm, lhs, rhs, &flat_string_check, &slow);
  }

  // Check for both being sequential one-byte strings,
  // and inline if that is the case.
  __ bind(&flat_string_check);

  __ JumpIfNonSmisNotBothSequentialOneByteStrings(lhs, rhs, r2, r3, &slow);

  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r2,
                      r3);
  if (cc == eq) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, lhs, rhs, r2, r3, r4);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, lhs, rhs, r2, r3, r4,
                                                    r5);
  }
  // Never falls through to here.

  __ bind(&slow);

  __ Push(lhs, rhs);
  // Figure out which native to call and setup the arguments.
  if (cc == eq) {
    __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals);
  } else {
    int ncr;  // NaN compare result
    if (cc == lt || cc == le) {
      ncr = GREATER;
    } else {
      DCHECK(cc == gt || cc == ge);  // remaining cases
      ncr = LESS;
    }
    __ mov(r0, Operand(Smi::FromInt(ncr)));
    __ push(r0);

    // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
    // tagged as a small integer.
    __ TailCallRuntime(is_strong(strength()) ? Runtime::kCompare_Strong
                                             : Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ stm(db_w, sp, kCallerSaved | lr.bit());

  const Register scratch = r1;

  if (save_doubles()) {
    __ SaveFPRegs(sp, scratch);
  }
  const int argument_count = 1;
  const int fp_argument_count = 0;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
  __ mov(r0, Operand(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    __ RestoreFPRegs(sp, scratch);
  }
  __ ldm(ia_w, sp, kCallerSaved | pc.bit());  // Also pop pc to get Ret(0).
}


void MathPowStub::Generate(MacroAssembler* masm) {
  const Register base = r1;
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(r2));
  const Register heapnumbermap = r5;
  const Register heapnumber = r0;
  const DwVfpRegister double_base = d0;
  const DwVfpRegister double_exponent = d1;
  const DwVfpRegister double_result = d2;
  const DwVfpRegister double_scratch = d3;
  const SwVfpRegister single_scratch = s6;
  const Register scratch = r9;
  const Register scratch2 = r4;

  Label call_runtime, done, int_exponent;
  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack to double registers.
    __ ldr(base, MemOperand(sp, 1 * kPointerSize));
    __ ldr(exponent, MemOperand(sp, 0 * kPointerSize));

    __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);

    __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
    __ ldr(scratch, FieldMemOperand(base, JSObject::kMapOffset));
    __ cmp(scratch, heapnumbermap);
    __ b(ne, &call_runtime);

    __ vldr(double_base, FieldMemOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent);

    __ bind(&base_is_smi);
    __ vmov(single_scratch, scratch);
    __ vcvt_f64_s32(double_base, single_scratch);
    __ bind(&unpack_exponent);

    __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);

    __ ldr(scratch, FieldMemOperand(exponent, JSObject::kMapOffset));
    __ cmp(scratch, heapnumbermap);
    __ b(ne, &call_runtime);
    __ vldr(double_exponent,
            FieldMemOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    // Base is already in double_base.
    __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);

    __ vldr(double_exponent,
            FieldMemOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label int_exponent_convert;
    // Detect integer exponents stored as double.
    __ vcvt_u32_f64(single_scratch, double_exponent);
    // We do not check for NaN or Infinity here because comparing numbers on
    // ARM correctly distinguishes NaNs. We end up calling the built-in.
    __ vcvt_f64_u32(double_scratch, single_scratch);
    __ VFPCompareAndSetFlags(double_scratch, double_exponent);
    __ b(eq, &int_exponent_convert);

    if (exponent_type() == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label not_plus_half;

      // Test for 0.5.
      __ vmov(double_scratch, 0.5, scratch);
      __ VFPCompareAndSetFlags(double_exponent, double_scratch);
      __ b(ne, &not_plus_half);

      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      __ vmov(double_scratch, -V8_INFINITY, scratch);
      __ VFPCompareAndSetFlags(double_base, double_scratch);
      __ vneg(double_result, double_scratch, eq);
      __ b(eq, &done);

      // Add +0 to convert -0 to +0.
      __ vadd(double_scratch, double_base, kDoubleRegZero);
      __ vsqrt(double_result, double_scratch);
      __ jmp(&done);
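      // Illustrative note (not part of the original source): the vadd with +0
      // matters because IEEE-754 vsqrt(-0) yields -0, while the spec requires
      // Math.pow(-0, 0.5) to be +0; adding +0 maps -0 to +0 and leaves every
      // other base value unchanged.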
820
821 __ bind(&not_plus_half);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000822 __ vmov(double_scratch, -0.5, scratch);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100823 __ VFPCompareAndSetFlags(double_exponent, double_scratch);
824 __ b(ne, &call_runtime);
825
826 // Calculates square root of base. Check for the special case of
827 // Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000828 __ vmov(double_scratch, -V8_INFINITY, scratch);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100829 __ VFPCompareAndSetFlags(double_base, double_scratch);
830 __ vmov(double_result, kDoubleRegZero, eq);
831 __ b(eq, &done);
832
833 // Add +0 to convert -0 to +0.
834 __ vadd(double_scratch, double_base, kDoubleRegZero);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000835 __ vmov(double_result, 1.0, scratch);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100836 __ vsqrt(double_scratch, double_scratch);
837 __ vdiv(double_result, double_result, double_scratch);
838 __ jmp(&done);
839 }
840
841 __ push(lr);
842 {
843 AllowExternalCallThatCantCauseGC scope(masm);
844 __ PrepareCallCFunction(0, 2, scratch);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000845 __ MovToFloatParameters(double_base, double_exponent);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100846 __ CallCFunction(
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000847 ExternalReference::power_double_double_function(isolate()),
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100848 0, 2);
849 }
850 __ pop(lr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000851 __ MovFromFloatResult(double_result);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100852 __ jmp(&done);
853
854 __ bind(&int_exponent_convert);
855 __ vcvt_u32_f64(single_scratch, double_exponent);
856 __ vmov(scratch, single_scratch);
857 }
858
859 // Calculate power with integer exponent.
860 __ bind(&int_exponent);
861
862 // Get two copies of exponent in the registers scratch and exponent.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000863 if (exponent_type() == INTEGER) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100864 __ mov(scratch, exponent);
865 } else {
866 // Exponent has previously been stored into scratch as untagged integer.
867 __ mov(exponent, scratch);
868 }
869 __ vmov(double_scratch, double_base); // Back up base.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000870 __ vmov(double_result, 1.0, scratch2);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100871
872 // Get absolute value of exponent.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000873 __ cmp(scratch, Operand::Zero());
874 __ mov(scratch2, Operand::Zero(), LeaveCC, mi);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100875 __ sub(scratch, scratch2, scratch, LeaveCC, mi);
876
877 Label while_true;
878 __ bind(&while_true);
879 __ mov(scratch, Operand(scratch, ASR, 1), SetCC);
880 __ vmul(double_result, double_result, double_scratch, cs);
881 __ vmul(double_scratch, double_scratch, double_scratch, ne);
882 __ b(ne, &while_true);
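  // Illustrative note (not part of the original source): this loop is
  // square-and-multiply. For exponent 5 (binary 101) the shifted-out bits
  // are 1, 0, 1: the result picks up base, skips base^2, then picks up
  // base^4, giving base^5; the loop exits once scratch reaches zero.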

  __ cmp(exponent, Operand::Zero());
  __ b(ge, &done);
  __ vmov(double_scratch, 1.0, scratch);
  __ vdiv(double_result, double_scratch, double_result);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ VFPCompareAndSetFlags(double_result, 0.0);
  __ b(ne, &done);
  // double_exponent may not contain the exponent value if the input was a
  // smi. We set it with exponent value before bailing out.
  __ vmov(single_scratch, exponent);
  __ vcvt_f64_s32(double_exponent, single_scratch);

  // Returning or bailing out.
  Counters* counters = isolate()->counters();
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in exponent.
    __ bind(&done);
    __ AllocateHeapNumber(
        heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
    __ vstr(double_result,
            FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
    DCHECK(heapnumber.is(r0));
    __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
    __ Ret(2);
  } else {
    __ push(lr);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(0, 2, scratch);
      __ MovToFloatParameters(double_base, double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()),
          0, 2);
    }
    __ pop(lr);
    __ MovFromFloatResult(double_result);

    __ bind(&done);
    __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
    __ Ret();
  }
}


bool CEntryStub::NeedsImmovableCode() {
  return true;
}


void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
  StoreFastElementStub::GenerateAheadOfTime(isolate);
  TypeofStub::GenerateAheadOfTime(isolate);
}


void CodeStub::GenerateFPStubs(Isolate* isolate) {
  // Generate if not already in cache.
  SaveFPRegsMode mode = kSaveFPRegs;
  CEntryStub(isolate, 1, mode).GetCode();
  StoreBufferOverflowStub(isolate, mode).GetCode();
  isolate->set_fp_stubs_generated(true);
}


void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
  stub.GetCode();
}


void CEntryStub::Generate(MacroAssembler* masm) {
  // Called from JavaScript; parameters are on stack as if calling JS function.
  // r0: number of arguments including receiver
  // r1: pointer to builtin function
  // fp: frame pointer (restored after C call)
  // sp: stack pointer (restored as callee's sp after C call)
  // cp: current context (C callee-saved)
  //
  // If argv_in_register():
  // r2: pointer to the first argument
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  __ mov(r5, Operand(r1));

  if (argv_in_register()) {
    // Move argv into the correct register.
    __ mov(r1, Operand(r2));
  } else {
    // Compute the argv pointer in a callee-saved register.
    __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ sub(r1, r1, Operand(kPointerSize));
  }

  // Enter the exit frame that transitions from JavaScript to C++.
  FrameScope scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(save_doubles());

  // Store a copy of argc in callee-saved registers for later.
  __ mov(r4, Operand(r0));

  // r0, r4: number of arguments including receiver (C callee-saved)
  // r1: pointer to the first argument (C callee-saved)
  // r5: pointer to builtin function (C callee-saved)

  // Result returned in r0 or r0+r1 by default.

#if V8_HOST_ARCH_ARM
  int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (FLAG_debug_code) {
    if (frame_alignment > kPointerSize) {
      Label alignment_as_expected;
      DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
      __ tst(sp, Operand(frame_alignment_mask));
      __ b(eq, &alignment_as_expected);
      // Don't use Check here, as it will call Runtime_Abort re-entering here.
      __ stop("Unexpected alignment");
      __ bind(&alignment_as_expected);
    }
  }
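  // Illustrative note (not part of the original source): with the usual
  // 8-byte ARM EABI activation frame alignment the mask is 7, so the tst
  // above passes only when the low three bits of sp are zero, i.e. sp is
  // 8-byte aligned.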
#endif

  // Call C built-in.
  // r0 = argc, r1 = argv
  __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));

  // To let the GC traverse the return address of the exit frames, we need to
  // know where the return address is. The CEntryStub is unmovable, so
  // we can store the address on the stack to be able to find it again and
  // we never have to restore it, because it will not change.
  // Compute the return address in lr to return to after the jump below. Pc is
  // already at '+ 8' from the current instruction but return is after three
  // instructions so add another 4 to pc to get the return address.
  {
    // Prevent literal pool emission before return address.
    Assembler::BlockConstPoolScope block_const_pool(masm);
    __ add(lr, pc, Operand(4));
    __ str(lr, MemOperand(sp, 0));
    __ Call(r5);
  }

  __ VFPEnsureFPSCRState(r2);

  // Check result for exception sentinel.
  Label exception_returned;
  __ CompareRoot(r0, Heap::kExceptionRootIndex);
  __ b(eq, &exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    ExternalReference pending_exception_address(
        Isolate::kPendingExceptionAddress, isolate());
    __ mov(r2, Operand(pending_exception_address));
    __ ldr(r2, MemOperand(r2));
    __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
    // Cannot use check here as it attempts to generate call into runtime.
    __ b(eq, &okay);
    __ stop("Unexpected pending exception");
    __ bind(&okay);
  }

  // Exit C frame and return.
  // r0:r1: result
  // sp: stack pointer
  // fp: frame pointer
  Register argc;
  if (argv_in_register()) {
    // We don't want to pop arguments so set argc to no_reg.
    argc = no_reg;
  } else {
    // Callee-saved register r4 still holds argc.
    argc = r4;
  }
  __ LeaveExitFrame(save_doubles(), argc, true);
  __ mov(pc, lr);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address(
      Isolate::kPendingHandlerContextAddress, isolate());
  ExternalReference pending_handler_code_address(
      Isolate::kPendingHandlerCodeAddress, isolate());
  ExternalReference pending_handler_offset_address(
      Isolate::kPendingHandlerOffsetAddress, isolate());
  ExternalReference pending_handler_fp_address(
      Isolate::kPendingHandlerFPAddress, isolate());
  ExternalReference pending_handler_sp_address(
      Isolate::kPendingHandlerSPAddress, isolate());

  // Ask the runtime for help to determine the handler. This will set r0 to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
                                 isolate());
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, 0, r0);
    __ mov(r0, Operand(0));
    __ mov(r1, Operand(0));
    __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ mov(cp, Operand(pending_handler_context_address));
  __ ldr(cp, MemOperand(cp));
  __ mov(sp, Operand(pending_handler_sp_address));
  __ ldr(sp, MemOperand(sp));
  __ mov(fp, Operand(pending_handler_fp_address));
  __ ldr(fp, MemOperand(fp));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (cp == 0) for non-JS frames.
  __ cmp(cp, Operand(0));
  __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);

  // Compute the handler entry address and jump to it.
  ConstantPoolUnavailableScope constant_pool_unavailable(masm);
  __ mov(r1, Operand(pending_handler_code_address));
  __ ldr(r1, MemOperand(r1));
  __ mov(r2, Operand(pending_handler_offset_address));
  __ ldr(r2, MemOperand(r2));
  __ add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start
  if (FLAG_enable_embedded_constant_pool) {
    __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r1);
  }
  __ add(pc, r1, r2);
}


Ben Murdochb8a8cc12014-11-26 15:28:44 +00001130void JSEntryStub::Generate(MacroAssembler* masm) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001131 // r0: code entry
1132 // r1: function
1133 // r2: receiver
1134 // r3: argc
1135 // [sp+0]: argv
1136
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001137 Label invoke, handler_entry, exit;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001138
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001139 ProfileEntryHookStub::MaybeCallEntryHook(masm);
1140
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001141 // Called from C, so do not pop argc and args on exit (preserve sp)
1142 // No need to save register-passed args
1143 // Save callee-saved registers (incl. cp and fp), sp, and lr
1144 __ stm(db_w, sp, kCalleeSaved | lr.bit());
1145
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001146 // Save callee-saved vfp registers.
1147 __ vstm(db_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);
1148 // Set up the reserved register for 0.0.
1149 __ vmov(kDoubleRegZero, 0.0);
1150 __ VFPEnsureFPSCRState(r4);
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01001151
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001152 // Get address of argv, see stm above.
1153 // r0: code entry
1154 // r1: function
1155 // r2: receiver
1156 // r3: argc
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01001157
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001158 // Set up argv in r4.
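  // argv was passed on the stack by the C caller; it now sits above the
  // callee-saved core registers plus lr (hence the +1 below) and the
  // callee-saved VFP registers saved above.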
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01001159 int offset_to_argv = (kNumCalleeSaved + 1) * kPointerSize;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001160 offset_to_argv += kNumDoubleCalleeSaved * kDoubleSize;
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01001161 __ ldr(r4, MemOperand(sp, offset_to_argv));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001162
1163 // Push a frame with special values setup to mark it as an entry frame.
1164 // r0: code entry
1165 // r1: function
1166 // r2: receiver
1167 // r3: argc
1168 // r4: argv
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001169 int marker = type();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001170 if (FLAG_enable_embedded_constant_pool) {
1171 __ mov(r8, Operand::Zero());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001172 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001173 __ mov(r7, Operand(Smi::FromInt(marker)));
1174 __ mov(r6, Operand(Smi::FromInt(marker)));
Steve Block44f0eee2011-05-26 01:26:41 +01001175 __ mov(r5,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001176 Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001177 __ ldr(r5, MemOperand(r5));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001178 __ mov(ip, Operand(-1)); // Push a bad frame pointer to fail if it is used.
1179 __ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() |
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001180 (FLAG_enable_embedded_constant_pool ? r8.bit() : 0) |
1181 ip.bit());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001182
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001183 // Set up frame pointer for the frame to be pushed.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001184 __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
1185
Ben Murdochb0fe1622011-05-05 13:52:32 +01001186 // If this is the outermost JS call, set js_entry_sp value.
Steve Block053d10c2011-06-13 19:13:29 +01001187 Label non_outermost_js;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001188 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001189 __ mov(r5, Operand(ExternalReference(js_entry_sp)));
1190 __ ldr(r6, MemOperand(r5));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001191 __ cmp(r6, Operand::Zero());
Steve Block053d10c2011-06-13 19:13:29 +01001192 __ b(ne, &non_outermost_js);
1193 __ str(fp, MemOperand(r5));
1194 __ mov(ip, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
1195 Label cont;
1196 __ b(&cont);
1197 __ bind(&non_outermost_js);
1198 __ mov(ip, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
1199 __ bind(&cont);
1200 __ push(ip);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001201
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001202 // Jump to a faked try block that does the invoke, with a faked catch
1203 // block that sets the pending exception.
1204 __ jmp(&invoke);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001205
1206 // Block literal pool emission whilst taking the position of the handler
1207 // entry. This avoids making the assumption that literal pools are always
1208 // emitted after an instruction is emitted, rather than before.
1209 {
1210 Assembler::BlockConstPoolScope block_const_pool(masm);
1211 __ bind(&handler_entry);
1212 handler_offset_ = handler_entry.pos();
1213 // Caught exception: Store result (exception) in the pending exception
1214 // field in the JSEnv and return a failure sentinel. Coming in here the
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001215 // fp will be invalid because the PushStackHandler below sets it to 0 to
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001216 // signal the existence of the JSEntry frame.
1217 __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1218 isolate())));
1219 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001220 __ str(r0, MemOperand(ip));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001221 __ LoadRoot(r0, Heap::kExceptionRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001222 __ b(&exit);
1223
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001224 // Invoke: Link this frame into the handler chain.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001225 __ bind(&invoke);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001226 // Must preserve r0-r4, r5-r6 are available.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001227 __ PushStackHandler();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001228 // If an exception not caught by another handler occurs, this handler gains
1229 // control at handler_entry above (just after the jmp(&invoke)); it records the
1230 // exception and branches to &exit, which restores all kCalleeSaved registers
1231 // (including cp and fp) to their saved values before returning a failure to C.
1232
1233 // Clear any pending exceptions.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001234 __ mov(r5, Operand(isolate()->factory()->the_hole_value()));
Ben Murdoch589d6972011-11-30 16:04:58 +00001235 __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001236 isolate())));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001237 __ str(r5, MemOperand(ip));
1238
1239 // Invoke the function by calling through JS entry trampoline builtin.
1240 // Notice that we cannot store a reference to the trampoline code directly in
1241 // this stub, because runtime stubs are not traversed when doing GC.
1242
1243 // Expected registers by Builtins::JSEntryTrampoline
1244 // r0: code entry
1245 // r1: function
1246 // r2: receiver
1247 // r3: argc
1248 // r4: argv
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001249 if (type() == StackFrame::ENTRY_CONSTRUCT) {
Steve Block44f0eee2011-05-26 01:26:41 +01001250 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001251 isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001252 __ mov(ip, Operand(construct_entry));
1253 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001254 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001255 __ mov(ip, Operand(entry));
1256 }
1257 __ ldr(ip, MemOperand(ip)); // deref address
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001258 __ add(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
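  // ip now points at the first instruction of the trampoline, just past the
  // Code object header.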
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001259
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001260 // Branch and link to JSEntryTrampoline.
1261 __ Call(ip);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001262
Steve Block053d10c2011-06-13 19:13:29 +01001263 // Unlink this frame from the handler chain.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001264 __ PopStackHandler();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001265
1266 __ bind(&exit); // r0 holds result
Steve Block053d10c2011-06-13 19:13:29 +01001267 // Check if the current stack frame is marked as the outermost JS frame.
1268 Label non_outermost_js_2;
1269 __ pop(r5);
1270 __ cmp(r5, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
1271 __ b(ne, &non_outermost_js_2);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001272 __ mov(r6, Operand::Zero());
Steve Block053d10c2011-06-13 19:13:29 +01001273 __ mov(r5, Operand(ExternalReference(js_entry_sp)));
1274 __ str(r6, MemOperand(r5));
1275 __ bind(&non_outermost_js_2);
Steve Block053d10c2011-06-13 19:13:29 +01001276
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001277 // Restore the top frame descriptors from the stack.
1278 __ pop(r3);
Steve Block44f0eee2011-05-26 01:26:41 +01001279 __ mov(ip,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001280 Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001281 __ str(r3, MemOperand(ip));
1282
1283 // Reset the stack to the callee saved registers.
1284 __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
1285
1286 // Restore callee-saved registers and return.
1287#ifdef DEBUG
1288 if (FLAG_debug_code) {
1289 __ mov(lr, Operand(pc));
1290 }
1291#endif
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01001292
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001293 // Restore callee-saved vfp registers.
1294 __ vldm(ia_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01001295
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001296 __ ldm(ia_w, sp, kCalleeSaved | pc.bit());
1297}
1298
1299
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001300void InstanceOfStub::Generate(MacroAssembler* masm) {
1301 Register const object = r1; // Object (lhs).
1302 Register const function = r0; // Function (rhs).
1303 Register const object_map = r2; // Map of {object}.
1304 Register const function_map = r3; // Map of {function}.
1305 Register const function_prototype = r4; // Prototype of {function}.
1306 Register const scratch = r5;
Steve Block1e0659c2011-05-24 12:43:12 +01001307
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001308 DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
1309 DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
Steve Block1e0659c2011-05-24 12:43:12 +01001310
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001311 // Check if {object} is a smi.
1312 Label object_is_smi;
1313 __ JumpIfSmi(object, &object_is_smi);
Steve Block1e0659c2011-05-24 12:43:12 +01001314
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001315 // Lookup the {function} and the {object} map in the global instanceof cache.
1316 // Note: This is safe because we clear the global instanceof cache whenever
1317 // we change the prototype of any object.
1318 Label fast_case, slow_case;
1319 __ ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
1320 __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
1321 __ b(ne, &fast_case);
1322 __ CompareRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
1323 __ b(ne, &fast_case);
1324 __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
1325 __ Ret();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001326
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001327 // If {object} is a smi we can safely return false if {function} is a JS
1328 // function, otherwise we have to miss to the runtime and throw an exception.
1329 __ bind(&object_is_smi);
1330 __ JumpIfSmi(function, &slow_case);
1331 __ CompareObjectType(function, function_map, scratch, JS_FUNCTION_TYPE);
1332 __ b(ne, &slow_case);
1333 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
1334 __ Ret();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001335
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001336 // Fast-case: The {function} must be a valid JSFunction.
1337 __ bind(&fast_case);
1338 __ JumpIfSmi(function, &slow_case);
1339 __ CompareObjectType(function, function_map, scratch, JS_FUNCTION_TYPE);
1340 __ b(ne, &slow_case);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001341
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001342 // Ensure that {function} has an instance prototype.
1343 __ ldrb(scratch, FieldMemOperand(function_map, Map::kBitFieldOffset));
1344 __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
1345 __ b(ne, &slow_case);
Steve Block1e0659c2011-05-24 12:43:12 +01001346
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001347 // Get the "prototype" (or initial map) of the {function}.
1348 __ ldr(function_prototype,
1349 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1350 __ AssertNotSmi(function_prototype);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001351
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001352 // Resolve the prototype if the {function} has an initial map. Afterwards the
1353 // {function_prototype} will be either the JSReceiver prototype object or the
1354 // hole value, which means that no instances of the {function} have been
1355 // created so far and hence we should return false.
1356 Label function_prototype_valid;
1357 __ CompareObjectType(function_prototype, scratch, scratch, MAP_TYPE);
1358 __ b(ne, &function_prototype_valid);
1359 __ ldr(function_prototype,
1360 FieldMemOperand(function_prototype, Map::kPrototypeOffset));
1361 __ bind(&function_prototype_valid);
1362 __ AssertNotSmi(function_prototype);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001363
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001364 // Update the global instanceof cache with the current {object} map and
1365 // {function}. The cached answer will be set when it is known below.
1366 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
1367 __ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
Steve Block1e0659c2011-05-24 12:43:12 +01001368
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001369 // Loop through the prototype chain looking for the {function} prototype.
1370 // Assume true, and change to false if not found.
1371 Register const object_instance_type = function_map;
1372 Register const map_bit_field = function_map;
1373 Register const null = scratch;
1374 Register const result = r0;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001375
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001376 Label done, loop, fast_runtime_fallback;
1377 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1378 __ LoadRoot(null, Heap::kNullValueRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001379 __ bind(&loop);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001380
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001381 // Check if the object needs to be access checked.
1382 __ ldrb(map_bit_field, FieldMemOperand(object_map, Map::kBitFieldOffset));
1383 __ tst(map_bit_field, Operand(1 << Map::kIsAccessCheckNeeded));
1384 __ b(ne, &fast_runtime_fallback);
1385 // Check if the current object is a Proxy.
1386 __ CompareInstanceType(object_map, object_instance_type, JS_PROXY_TYPE);
1387 __ b(eq, &fast_runtime_fallback);
Steve Block1e0659c2011-05-24 12:43:12 +01001388
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001389 __ ldr(object, FieldMemOperand(object_map, Map::kPrototypeOffset));
1390 __ cmp(object, function_prototype);
1391 __ b(eq, &done);
1392 __ cmp(object, null);
1393 __ ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
1394 __ b(ne, &loop);
1395 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1396 __ bind(&done);
1397 __ StoreRoot(result, Heap::kInstanceofCacheAnswerRootIndex);
1398 __ Ret();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001399
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001400 // Found Proxy or access check needed: Call the runtime
1401 __ bind(&fast_runtime_fallback);
1402 __ Push(object, function_prototype);
1403 // Invalidate the instanceof cache.
1404 __ Move(scratch, Smi::FromInt(0));
1405 __ StoreRoot(scratch, Heap::kInstanceofCacheFunctionRootIndex);
1406 __ TailCallRuntime(Runtime::kHasInPrototypeChain);
Steve Block1e0659c2011-05-24 12:43:12 +01001407
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001408 // Slow-case: Call the %InstanceOf runtime function.
1409 __ bind(&slow_case);
1410 __ Push(object, function);
1411 __ TailCallRuntime(Runtime::kInstanceOf);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001412}
1413
1414
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001415void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
1416 Label miss;
1417 Register receiver = LoadDescriptor::ReceiverRegister();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001418 // Ensure that the vector and slot registers won't be clobbered before
1419 // calling the miss handler.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001420 DCHECK(!AreAliased(r4, r5, LoadWithVectorDescriptor::VectorRegister(),
1421 LoadWithVectorDescriptor::SlotRegister()));
Steve Block1e0659c2011-05-24 12:43:12 +01001422
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001423 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r4,
1424 r5, &miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001425 __ bind(&miss);
1426 PropertyAccessCompiler::TailCallBuiltin(
1427 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
1428}
Steve Block1e0659c2011-05-24 12:43:12 +01001429
1430
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001431void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
1432 // Return address is in lr.
1433 Label miss;
1434
1435 Register receiver = LoadDescriptor::ReceiverRegister();
1436 Register index = LoadDescriptor::NameRegister();
1437 Register scratch = r5;
1438 Register result = r0;
1439 DCHECK(!scratch.is(receiver) && !scratch.is(index));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001440 DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
1441 result.is(LoadWithVectorDescriptor::SlotRegister()));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001442
1443 // StringCharAtGenerator doesn't use the result register until it's passed
1444 // the different miss possibilities. If it did, we would have a conflict
1445 // when FLAG_vector_ics is true.
1446 StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
1447 &miss, // When not a string.
1448 &miss, // When not a number.
1449 &miss, // When index out of range.
1450 STRING_INDEX_IS_ARRAY_INDEX,
1451 RECEIVER_IS_STRING);
1452 char_at_generator.GenerateFast(masm);
1453 __ Ret();
1454
1455 StubRuntimeCallHelper call_helper;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001456 char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001457
1458 __ bind(&miss);
1459 PropertyAccessCompiler::TailCallBuiltin(
1460 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
1461}
1462
1463
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001464void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
1465 // The displacement is the offset of the last parameter (if any)
1466 // relative to the frame pointer.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001467 const int kDisplacement =
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001468 StandardFrameConstants::kCallerSPOffset - kPointerSize;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001469 DCHECK(r1.is(ArgumentsAccessReadDescriptor::index()));
1470 DCHECK(r0.is(ArgumentsAccessReadDescriptor::parameter_count()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001471
1472 // Check that the key is a smi.
1473 Label slow;
Steve Block1e0659c2011-05-24 12:43:12 +01001474 __ JumpIfNotSmi(r1, &slow);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001475
1476 // Check if the calling frame is an arguments adaptor frame.
1477 Label adaptor;
1478 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1479 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
1480 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1481 __ b(eq, &adaptor);
1482
1483 // Check index against formal parameters count limit passed in
1484 // through register r0. Use an unsigned comparison to get the negative
1485 // check for free.
1486 __ cmp(r1, r0);
Ben Murdoch086aeea2011-05-13 15:57:08 +01001487 __ b(hs, &slow);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001488
1489 // Read the argument from the stack and return it.
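  // r0 (parameter count) and r1 (key) are smis; their difference, scaled to a
  // byte offset by PointerOffsetFromSmiKey, addresses the argument relative to fp.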
1490 __ sub(r3, r0, r1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001491 __ add(r3, fp, Operand::PointerOffsetFromSmiKey(r3));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001492 __ ldr(r0, MemOperand(r3, kDisplacement));
1493 __ Jump(lr);
1494
1495 // Arguments adaptor case: Check index against actual arguments
1496 // limit found in the arguments adaptor frame. Use an unsigned
1497 // comparison to get the negative check for free.
1498 __ bind(&adaptor);
1499 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
1500 __ cmp(r1, r0);
1501 __ b(cs, &slow);
1502
1503 // Read the argument from the adaptor frame and return it.
1504 __ sub(r3, r0, r1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001505 __ add(r3, r2, Operand::PointerOffsetFromSmiKey(r3));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001506 __ ldr(r0, MemOperand(r3, kDisplacement));
1507 __ Jump(lr);
1508
1509 // Slow-case: Handle non-smi or out-of-bounds access to arguments
1510 // by calling the runtime system.
1511 __ bind(&slow);
1512 __ push(r1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001513 __ TailCallRuntime(Runtime::kArguments);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001514}
1515
1516
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001517void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001518 // r1 : function
1519 // r2 : number of parameters (tagged)
1520 // r3 : parameters pointer
1521
1522 DCHECK(r1.is(ArgumentsAccessNewDescriptor::function()));
1523 DCHECK(r2.is(ArgumentsAccessNewDescriptor::parameter_count()));
1524 DCHECK(r3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001525
1526 // Check if the calling frame is an arguments adaptor frame.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001527 Label runtime;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001528 __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1529 __ ldr(r0, MemOperand(r4, StandardFrameConstants::kContextOffset));
1530 __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001531 __ b(ne, &runtime);
1532
1533 // Patch the arguments.length and the parameters pointer in the current frame.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001534 __ ldr(r2, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
1535 __ add(r4, r4, Operand(r2, LSL, 1));
1536 __ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001537
1538 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001539 __ Push(r1, r3, r2);
1540 __ TailCallRuntime(Runtime::kNewSloppyArguments);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001541}
1542
1543
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001544void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001545 // r1 : function
1546 // r2 : number of parameters (tagged)
1547 // r3 : parameters pointer
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001548 // Registers used over whole function:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001549 // r5 : arguments count (tagged)
1550 // r6 : mapped parameter count (tagged)
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001551
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001552 DCHECK(r1.is(ArgumentsAccessNewDescriptor::function()));
1553 DCHECK(r2.is(ArgumentsAccessNewDescriptor::parameter_count()));
1554 DCHECK(r3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001555
1556 // Check if the calling frame is an arguments adaptor frame.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001557 Label adaptor_frame, try_allocate, runtime;
1558 __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1559 __ ldr(r0, MemOperand(r4, StandardFrameConstants::kContextOffset));
1560 __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001561 __ b(eq, &adaptor_frame);
1562
1563 // No adaptor, parameter count = argument count.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001564 __ mov(r5, r2);
1565 __ mov(r6, r2);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001566 __ b(&try_allocate);
1567
1568 // We have an adaptor frame. Patch the parameters pointer.
1569 __ bind(&adaptor_frame);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001570 __ ldr(r5, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
1571 __ add(r4, r4, Operand(r5, LSL, 1));
1572 __ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001573
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001574 // r5 = argument count (tagged)
1575 // r6 = parameter count (tagged)
1576 // Compute the mapped parameter count = min(r6, r5) in r6.
1577 __ mov(r6, r2);
1578 __ cmp(r6, Operand(r5));
1579 __ mov(r6, Operand(r5), LeaveCC, gt);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001580
1581 __ bind(&try_allocate);
1582
1583 // Compute the sizes of backing store, parameter map, and arguments object.
1584 // 1. Parameter map, has 2 extra words containing context and backing store.
1585 const int kParameterMapHeaderSize =
1586 FixedArray::kHeaderSize + 2 * kPointerSize;
1587 // If there are no mapped parameters, we do not need the parameter_map.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001588 __ cmp(r6, Operand(Smi::FromInt(0)));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001589 __ mov(r9, Operand::Zero(), LeaveCC, eq);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001590 __ mov(r9, Operand(r6, LSL, 1), LeaveCC, ne);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001591 __ add(r9, r9, Operand(kParameterMapHeaderSize), LeaveCC, ne);
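  // r9 now holds the parameter map size in bytes: one pointer per mapped
  // parameter plus the header, or 0 if no parameter map is needed.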
1592
1593 // 2. Backing store.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001594 __ add(r9, r9, Operand(r5, LSL, 1));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001595 __ add(r9, r9, Operand(FixedArray::kHeaderSize));
1596
1597 // 3. Arguments object.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001598 __ add(r9, r9, Operand(Heap::kSloppyArgumentsObjectSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001599
1600 // Do the allocation of all three objects in one go.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001601 __ Allocate(r9, r0, r9, r4, &runtime, TAG_OBJECT);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001602
1603 // r0 = address of new object(s) (tagged)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001604 // r2 = argument count (smi-tagged)
1605 // Get the arguments boilerplate from the current native context into r4.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001606 const int kNormalOffset =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001607 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001608 const int kAliasedOffset =
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001609 Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001610
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001611 __ ldr(r4, NativeContextMemOperand());
1612 __ cmp(r6, Operand::Zero());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001613 __ ldr(r4, MemOperand(r4, kNormalOffset), eq);
1614 __ ldr(r4, MemOperand(r4, kAliasedOffset), ne);
1615
1616 // r0 = address of new object (tagged)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001617 // r2 = argument count (smi-tagged)
1618 // r4 = address of arguments map (tagged)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001619 // r6 = mapped parameter count (tagged)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001620 __ str(r4, FieldMemOperand(r0, JSObject::kMapOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001621 __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
1622 __ str(r9, FieldMemOperand(r0, JSObject::kPropertiesOffset));
1623 __ str(r9, FieldMemOperand(r0, JSObject::kElementsOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001624
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001625 // Set up the callee in-object property.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001626 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001627 __ AssertNotSmi(r1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001628 const int kCalleeOffset = JSObject::kHeaderSize +
1629 Heap::kArgumentsCalleeIndex * kPointerSize;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001630 __ str(r1, FieldMemOperand(r0, kCalleeOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001631
1632 // Use the length (smi tagged) and set that as an in-object property too.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001633 __ AssertSmi(r5);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001634 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
1635 const int kLengthOffset = JSObject::kHeaderSize +
1636 Heap::kArgumentsLengthIndex * kPointerSize;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001637 __ str(r5, FieldMemOperand(r0, kLengthOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001638
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001639 // Set up the elements pointer in the allocated arguments object.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001640 // If we allocated a parameter map, r4 will point there, otherwise
1641 // it will point to the backing store.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001642 __ add(r4, r0, Operand(Heap::kSloppyArgumentsObjectSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001643 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
1644
1645 // r0 = address of new object (tagged)
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001646 // r2 = argument count (tagged)
1647 // r4 = address of parameter map or backing store (tagged)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001648 // r6 = mapped parameter count (tagged)
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001649 // Initialize parameter map. If there are no mapped arguments, we're done.
1650 Label skip_parameter_map;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001651 __ cmp(r6, Operand(Smi::FromInt(0)));
1652 // Move backing store address to r1, because it is
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001653 // expected there when filling in the unmapped arguments.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001654 __ mov(r1, r4, LeaveCC, eq);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001655 __ b(eq, &skip_parameter_map);
1656
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001657 __ LoadRoot(r5, Heap::kSloppyArgumentsElementsMapRootIndex);
1658 __ str(r5, FieldMemOperand(r4, FixedArray::kMapOffset));
1659 __ add(r5, r6, Operand(Smi::FromInt(2)));
1660 __ str(r5, FieldMemOperand(r4, FixedArray::kLengthOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001661 __ str(cp, FieldMemOperand(r4, FixedArray::kHeaderSize + 0 * kPointerSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001662 __ add(r5, r4, Operand(r6, LSL, 1));
1663 __ add(r5, r5, Operand(kParameterMapHeaderSize));
1664 __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + 1 * kPointerSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001665
1666 // Copy the parameter slots and the holes in the arguments.
1667 // We need to fill in mapped_parameter_count slots. They index the context,
1668 // where parameters are stored in reverse order, at
1669 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
1670 // The mapped parameters thus need to get indices
1671 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
1672 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
1673 // We loop from right to left.
1674 Label parameters_loop, parameters_test;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001675 __ mov(r5, r6);
1676 __ add(r9, r2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
1677 __ sub(r9, r9, Operand(r6));
1678 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1679 __ add(r1, r4, Operand(r5, LSL, 1));
1680 __ add(r1, r1, Operand(kParameterMapHeaderSize));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001681
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001682 // r1 = address of backing store (tagged)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001683 // r4 = address of parameter map (tagged), which is also the address of new
1684 // object + Heap::kSloppyArgumentsObjectSize (tagged)
1685 // r0 = temporary scratch (a.o., for address calculation)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001686 // r5 = loop variable (tagged)
1687 // ip = the hole value
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001688 __ jmp(&parameters_test);
1689
1690 __ bind(&parameters_loop);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001691 __ sub(r5, r5, Operand(Smi::FromInt(1)));
1692 __ mov(r0, Operand(r5, LSL, 1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001693 __ add(r0, r0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
1694 __ str(r9, MemOperand(r4, r0));
1695 __ sub(r0, r0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001696 __ str(ip, MemOperand(r1, r0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001697 __ add(r9, r9, Operand(Smi::FromInt(1)));
1698 __ bind(&parameters_test);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001699 __ cmp(r5, Operand(Smi::FromInt(0)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001700 __ b(ne, &parameters_loop);
1701
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001702 // Restore r0 = new object (tagged) and r5 = argument count (tagged).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001703 __ sub(r0, r4, Operand(Heap::kSloppyArgumentsObjectSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001704 __ ldr(r5, FieldMemOperand(r0, kLengthOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001705
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001706 __ bind(&skip_parameter_map);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001707 // r0 = address of new object (tagged)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001708 // r1 = address of backing store (tagged)
1709 // r5 = argument count (tagged)
1710 // r6 = mapped parameter count (tagged)
1711 // r9 = scratch
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001712 // Copy arguments header and remaining slots (if there are any).
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001713 __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex);
1714 __ str(r9, FieldMemOperand(r1, FixedArray::kMapOffset));
1715 __ str(r5, FieldMemOperand(r1, FixedArray::kLengthOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001716
1717 Label arguments_loop, arguments_test;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001718 __ sub(r3, r3, Operand(r6, LSL, 1));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001719 __ jmp(&arguments_test);
1720
1721 __ bind(&arguments_loop);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001722 __ sub(r3, r3, Operand(kPointerSize));
1723 __ ldr(r4, MemOperand(r3, 0));
1724 __ add(r9, r1, Operand(r6, LSL, 1));
1725 __ str(r4, FieldMemOperand(r9, FixedArray::kHeaderSize));
1726 __ add(r6, r6, Operand(Smi::FromInt(1)));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001727
1728 __ bind(&arguments_test);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001729 __ cmp(r6, Operand(r5));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001730 __ b(lt, &arguments_loop);
1731
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001732 // Return.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001733 __ Ret();
1734
1735 // Do the runtime call to allocate the arguments object.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001736 // r0 = address of new object (tagged)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001737 // r5 = argument count (tagged)
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001738 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001739 __ Push(r1, r3, r5);
1740 __ TailCallRuntime(Runtime::kNewSloppyArguments);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001741}
1742
1743
1744void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
1745 // Return address is in lr.
1746 Label slow;
1747
1748 Register receiver = LoadDescriptor::ReceiverRegister();
1749 Register key = LoadDescriptor::NameRegister();
1750
1751 // Check that the key is an array index, that is Uint32.
1752 __ NonNegativeSmiTst(key);
1753 __ b(ne, &slow);
1754
1755 // Everything is fine, call runtime.
1756 __ Push(receiver, key); // Receiver, key.
1757
1758 // Perform tail call to the entry.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001759 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001760
1761 __ bind(&slow);
1762 PropertyAccessCompiler::TailCallBuiltin(
1763 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001764}
1765
1766
1767void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001768 // r1 : function
1769 // r2 : number of parameters (tagged)
1770 // r3 : parameters pointer
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001771
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001772 DCHECK(r1.is(ArgumentsAccessNewDescriptor::function()));
1773 DCHECK(r2.is(ArgumentsAccessNewDescriptor::parameter_count()));
1774 DCHECK(r3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
1775
1776 // Check if the calling frame is an arguments adaptor frame.
1777 Label try_allocate, runtime;
1778 __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1779 __ ldr(r0, MemOperand(r4, StandardFrameConstants::kContextOffset));
1780 __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1781 __ b(ne, &try_allocate);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001782
1783 // Patch the arguments.length and the parameters pointer.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001784 __ ldr(r2, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
1785 __ add(r4, r4, Operand::PointerOffsetFromSmiKey(r2));
1786 __ add(r3, r4, Operand(StandardFrameConstants::kCallerSPOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001787
1788 // Try the new space allocation. Start out with computing the size
1789 // of the arguments object and the elements array in words.
1790 Label add_arguments_object;
1791 __ bind(&try_allocate);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001792 __ SmiUntag(r9, r2, SetCC);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001793 __ b(eq, &add_arguments_object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001794 __ add(r9, r9, Operand(FixedArray::kHeaderSize / kPointerSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001795 __ bind(&add_arguments_object);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001796 __ add(r9, r9, Operand(Heap::kStrictArgumentsObjectSize / kPointerSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001797
1798 // Do the allocation of both objects in one go.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001799 __ Allocate(r9, r0, r4, r5, &runtime,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001800 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001801
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001802 // Get the arguments boilerplate from the current native context.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001803 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, r4);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001804
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001805 __ str(r4, FieldMemOperand(r0, JSObject::kMapOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001806 __ LoadRoot(r5, Heap::kEmptyFixedArrayRootIndex);
1807 __ str(r5, FieldMemOperand(r0, JSObject::kPropertiesOffset));
1808 __ str(r5, FieldMemOperand(r0, JSObject::kElementsOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001809
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001810 // Get the length (smi tagged) and set that as an in-object property too.
Steve Block44f0eee2011-05-26 01:26:41 +01001811 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001812 __ AssertSmi(r2);
1813 __ str(r2,
1814 FieldMemOperand(r0, JSObject::kHeaderSize +
1815 Heap::kArgumentsLengthIndex * kPointerSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001816
1817 // If there are no actual arguments, we're done.
1818 Label done;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001819 __ cmp(r2, Operand::Zero());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001820 __ b(eq, &done);
1821
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001822 // Set up the elements pointer in the allocated arguments object and
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001823 // initialize the header in the elements fixed array.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001824 __ add(r4, r0, Operand(Heap::kStrictArgumentsObjectSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001825 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001826 __ LoadRoot(r5, Heap::kFixedArrayMapRootIndex);
1827 __ str(r5, FieldMemOperand(r4, FixedArray::kMapOffset));
1828 __ str(r2, FieldMemOperand(r4, FixedArray::kLengthOffset));
1829 __ SmiUntag(r2);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001830
1831 // Copy the fixed array slots.
1832 Label loop;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001833 // Set up r4 to point to the first array slot.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001834 __ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1835 __ bind(&loop);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001836 // Pre-decrement r3 with kPointerSize on each iteration.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001837 // Pre-decrement in order to skip receiver.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001838 __ ldr(r5, MemOperand(r3, kPointerSize, NegPreIndex));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001839 // Post-increment r4 with kPointerSize on each iteration.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001840 __ str(r5, MemOperand(r4, kPointerSize, PostIndex));
1841 __ sub(r2, r2, Operand(1));
1842 __ cmp(r2, Operand::Zero());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001843 __ b(ne, &loop);
1844
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001845 // Return.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001846 __ bind(&done);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001847 __ Ret();
1848
1849 // Do the runtime call to allocate the arguments object.
1850 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001851 __ Push(r1, r3, r2);
1852 __ TailCallRuntime(Runtime::kNewStrictArguments);
1853}
1854
1855
1856void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
1857 // r2 : number of parameters (tagged)
1858 // r3 : parameters pointer
1859 // r4 : rest parameter index (tagged)
1860
1861 Label runtime;
1862 __ ldr(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1863 __ ldr(r0, MemOperand(r5, StandardFrameConstants::kContextOffset));
1864 __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1865 __ b(ne, &runtime);
1866
1867 // Patch the arguments.length and the parameters pointer.
1868 __ ldr(r2, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
1869 __ add(r3, r5, Operand::PointerOffsetFromSmiKey(r2));
1870 __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
1871
1872 __ bind(&runtime);
1873 __ Push(r2, r3, r4);
1874 __ TailCallRuntime(Runtime::kNewRestParam);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001875}
1876
1877
1878void RegExpExecStub::Generate(MacroAssembler* masm) {
1879 // Just jump directly to the runtime if native RegExp is not selected at
1880 // compile time, or if the regexp entry in generated code has been turned off
1881 // by a runtime switch or at compilation.
1882#ifdef V8_INTERPRETED_REGEXP
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001883 __ TailCallRuntime(Runtime::kRegExpExec);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001884#else // V8_INTERPRETED_REGEXP
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001885
1886 // Stack frame on entry.
1887 // sp[0]: last_match_info (expected JSArray)
1888 // sp[4]: previous index
1889 // sp[8]: subject string
1890 // sp[12]: JSRegExp object
1891
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001892 const int kLastMatchInfoOffset = 0 * kPointerSize;
1893 const int kPreviousIndexOffset = 1 * kPointerSize;
1894 const int kSubjectOffset = 2 * kPointerSize;
1895 const int kJSRegExpOffset = 3 * kPointerSize;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001896
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001897 Label runtime;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001898 // Allocation of registers for this function. These are in callee save
1899 // registers and will be preserved by the call to the native RegExp code, as
1900 // this code is called using the normal C calling convention. When calling
1901 // directly from generated code the native RegExp code will not do a GC and
1902 // therefore the contents of these registers are safe to use after the call.
1903 Register subject = r4;
1904 Register regexp_data = r5;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001905 Register last_match_info_elements = no_reg; // will be r6;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001906
1907 // Ensure that a RegExp stack is allocated.
1908 ExternalReference address_of_regexp_stack_memory_address =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001909 ExternalReference::address_of_regexp_stack_memory_address(isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001910 ExternalReference address_of_regexp_stack_memory_size =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001911 ExternalReference::address_of_regexp_stack_memory_size(isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001912 __ mov(r0, Operand(address_of_regexp_stack_memory_size));
1913 __ ldr(r0, MemOperand(r0, 0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001914 __ cmp(r0, Operand::Zero());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001915 __ b(eq, &runtime);
1916
1917 // Check that the first argument is a JSRegExp object.
1918 __ ldr(r0, MemOperand(sp, kJSRegExpOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001919 __ JumpIfSmi(r0, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001920 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
1921 __ b(ne, &runtime);
1922
1923 // Check that the RegExp has been compiled (data contains a fixed array).
1924 __ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset));
1925 if (FLAG_debug_code) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001926 __ SmiTst(regexp_data);
1927 __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001928 __ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001929 __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001930 }
1931
1932 // regexp_data: RegExp data (FixedArray)
1933 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
1934 __ ldr(r0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
1935 __ cmp(r0, Operand(Smi::FromInt(JSRegExp::IRREGEXP)));
1936 __ b(ne, &runtime);
1937
1938 // regexp_data: RegExp data (FixedArray)
1939 // Check that the number of captures fit in the static offsets vector buffer.
1940 __ ldr(r2,
1941 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001942 // Check (number_of_captures + 1) * 2 <= offsets vector size
1943 // Or number_of_captures * 2 <= offsets vector size - 2
1944 // Multiplying by 2 comes for free since r2 is smi-tagged.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001945 STATIC_ASSERT(kSmiTag == 0);
1946 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001947 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
1948 __ cmp(r2, Operand(Isolate::kJSRegexpStaticOffsetsVectorSize - 2));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001949 __ b(hi, &runtime);
1950
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001951 // Reset offset for possibly sliced string.
1952 __ mov(r9, Operand::Zero());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001953 __ ldr(subject, MemOperand(sp, kSubjectOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001954 __ JumpIfSmi(subject, &runtime);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001955 __ mov(r3, subject); // Make a copy of the original subject string.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001956 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
1957 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001958 // subject: subject string
1959 // r3: subject string
1960 // r0: subject string instance type
1961 // regexp_data: RegExp data (FixedArray)
1962 // Handle subject string according to its encoding and representation:
1963 // (1) Sequential string? If yes, go to (5).
1964 // (2) Anything but sequential or cons? If yes, go to (6).
1965 // (3) Cons string. If the string is flat, replace subject with first string.
1966 // Otherwise bailout.
1967 // (4) Is subject external? If yes, go to (7).
1968 // (5) Sequential string. Load regexp code according to encoding.
1969 // (E) Carry on.
1970 /// [...]
1971
1972 // Deferred code at the end of the stub:
1973 // (6) Not a long external string? If yes, go to (8).
1974 // (7) External string. Make it, offset-wise, look like a sequential string.
1975 // Go to (5).
1976 // (8) Short external string or not a string? If yes, bail out to runtime.
1977 // (9) Sliced string. Replace subject with parent. Go to (4).
1978
1979 Label seq_string /* 5 */, external_string /* 7 */,
1980 check_underlying /* 4 */, not_seq_nor_cons /* 6 */,
1981 not_long_external /* 8 */;
1982
1983 // (1) Sequential string? If yes, go to (5).
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001984 __ and_(r1,
1985 r0,
1986 Operand(kIsNotStringMask |
1987 kStringRepresentationMask |
1988 kShortExternalStringMask),
1989 SetCC);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001990 STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001991 __ b(eq, &seq_string); // Go to (5).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001992
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001993 // (2) Anything but sequential or cons? If yes, go to (6).
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001994 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
1995 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001996 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
1997 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001998 __ cmp(r1, Operand(kExternalStringTag));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001999 __ b(ge, &not_seq_nor_cons); // Go to (6).
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002000
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002001 // (3) Cons string. Check that it's flat.
2002 // Replace subject with first string and reload instance type.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002003 __ ldr(r0, FieldMemOperand(subject, ConsString::kSecondOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002004 __ CompareRoot(r0, Heap::kempty_stringRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002005 __ b(ne, &runtime);
2006 __ ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002007
2008 // (4) Is subject external? If yes, go to (7).
2009 __ bind(&check_underlying);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002010 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
2011 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002012 STATIC_ASSERT(kSeqStringTag == 0);
2013 __ tst(r0, Operand(kStringRepresentationMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002014 // The underlying external string is never a short external string.
2015 STATIC_ASSERT(ExternalString::kMaxShortLength < ConsString::kMinLength);
2016 STATIC_ASSERT(ExternalString::kMaxShortLength < SlicedString::kMinLength);
2017 __ b(ne, &external_string); // Go to (7).
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002018
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002019 // (5) Sequential string. Load regexp code according to encoding.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002020 __ bind(&seq_string);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002021 // subject: sequential subject string (or look-alike, external string)
2022 // r3: original subject string
2023 // Load previous index and check range before r3 is overwritten. We have to
2024 // use r3 instead of subject here because subject might have been only made
2025 // to look like a sequential string when it actually is an external string.
2026 __ ldr(r1, MemOperand(sp, kPreviousIndexOffset));
2027 __ JumpIfNotSmi(r1, &runtime);
2028 __ ldr(r3, FieldMemOperand(r3, String::kLengthOffset));
2029 __ cmp(r3, Operand(r1));
2030 __ b(ls, &runtime);
2031 __ SmiUntag(r1);
2032
2033 STATIC_ASSERT(4 == kOneByteStringTag);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002034 STATIC_ASSERT(kTwoByteStringTag == 0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002035 __ and_(r0, r0, Operand(kStringEncodingMask));
2036 __ mov(r3, Operand(r0, ASR, 2), SetCC);
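  // r3 is now 1 for one-byte subjects and 0 for two-byte subjects
  // (kOneByteStringTag == 4, shifted right by two).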
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002037 __ ldr(r6, FieldMemOperand(regexp_data, JSRegExp::kDataOneByteCodeOffset),
2038 ne);
2039 __ ldr(r6, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset), eq);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002040
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002041 // (E) Carry on. String handling is done.
2042 // r6: irregexp code
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002043 // Check that the irregexp code has been generated for the actual string
2044 // encoding. If it has, the field contains a code object; otherwise it contains
Ben Murdoch257744e2011-11-30 15:57:28 +00002045 // a smi (code flushing support).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002046 __ JumpIfSmi(r6, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002047
2048 // r1: previous index
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002049 // r3: encoding of subject string (1 if one_byte, 0 if two_byte);
2050 // r6: code
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002051 // subject: Subject string
2052 // regexp_data: RegExp data (FixedArray)
2053 // All checks done. Now push arguments for native regexp code.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002054 __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1, r0, r2);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002055
Steve Block44f0eee2011-05-26 01:26:41 +01002056 // Isolates: note we add an additional parameter here (isolate pointer).
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002057 const int kRegExpExecuteArguments = 9;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002058 const int kParameterRegisters = 4;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002059 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
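  // The first four arguments are passed in r0-r3; the remaining five
  // (arguments 5-9) go into the stack slots reserved by the exit frame above.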
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002060
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002061 // Stack pointer now points to cell where return address is to be written.
2062 // Arguments are before that on the stack or in registers.
2063
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002064 // Argument 9 (sp[20]): Pass current isolate address.
2065 __ mov(r0, Operand(ExternalReference::isolate_address(isolate())));
2066 __ str(r0, MemOperand(sp, 5 * kPointerSize));
2067
2068 // Argument 8 (sp[16]): Indicate that this is a direct call from JavaScript.
2069 __ mov(r0, Operand(1));
Steve Block44f0eee2011-05-26 01:26:41 +01002070 __ str(r0, MemOperand(sp, 4 * kPointerSize));
2071
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002072 // Argument 7 (sp[12]): Start (high end) of backtracking stack memory area.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002073 __ mov(r0, Operand(address_of_regexp_stack_memory_address));
2074 __ ldr(r0, MemOperand(r0, 0));
2075 __ mov(r2, Operand(address_of_regexp_stack_memory_size));
2076 __ ldr(r2, MemOperand(r2, 0));
2077 __ add(r0, r0, Operand(r2));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002078 __ str(r0, MemOperand(sp, 3 * kPointerSize));
2079
2080 // Argument 6: Set the number of capture registers to zero to force global
2081 // regexps to behave as non-global. This does not affect non-global regexps.
2082 __ mov(r0, Operand::Zero());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002083 __ str(r0, MemOperand(sp, 2 * kPointerSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002084
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002085 // Argument 5 (sp[4]): static offsets vector buffer.
Steve Block44f0eee2011-05-26 01:26:41 +01002086 __ mov(r0,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002087 Operand(ExternalReference::address_of_static_offsets_vector(
2088 isolate())));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002089 __ str(r0, MemOperand(sp, 1 * kPointerSize));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002090
2091 // For arguments 4 and 3, get the string length, calculate the start of the string data, and
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002092 // calculate the shift of the index (0 for one-byte and 1 for two-byte).
2093 __ add(r7, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002094 __ eor(r3, r3, Operand(1));
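  // Flip the encoding bit so that r3 becomes the index shift: 0 for one-byte
  // strings, 1 for two-byte strings.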
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002095 // Load the length of the original subject string from the previous stack
2096 // frame. Therefore we have to use fp, which points exactly to two pointer
2097 // sizes below the previous sp. (Because creating a new stack frame pushes
2098 // the previous fp onto the stack and moves up sp by 2 * kPointerSize.)
Ben Murdoch589d6972011-11-30 16:04:58 +00002099 __ ldr(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002100 // If slice offset is not 0, load the length from the original sliced string.
2101 // Argument 4, r3: End of string data
2102 // Argument 3, r2: Start of string data
2103 // Prepare start and end index of the input.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002104 __ add(r9, r7, Operand(r9, LSL, r3));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002105 __ add(r2, r9, Operand(r1, LSL, r3));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002106
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002107 __ ldr(r7, FieldMemOperand(subject, String::kLengthOffset));
2108 __ SmiUntag(r7);
2109 __ add(r3, r9, Operand(r7, LSL, r3));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002110
2111 // Argument 2 (r1): Previous index.
2112 // Already there
2113
2114 // Argument 1 (r0): Subject string.
Ben Murdoch589d6972011-11-30 16:04:58 +00002115 __ mov(r0, subject);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002116
2117 // Locate the code entry and call it.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002118 __ add(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
2119 DirectCEntryStub stub(isolate());
2120 stub.GenerateCall(masm, r6);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002121
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002122 __ LeaveExitFrame(false, no_reg, true);
2123
2124 last_match_info_elements = r6;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002125
2126 // r0: result
2127 // subject: subject string (callee saved)
2128 // regexp_data: RegExp data (callee saved)
2129 // last_match_info_elements: Last match info elements (callee saved)
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002130 // Check the result.
2131 Label success;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002132 __ cmp(r0, Operand(1));
2133 // We expect exactly one result since we force the called regexp to behave
2134 // as non-global.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002135 __ b(eq, &success);
2136 Label failure;
Ben Murdoch589d6972011-11-30 16:04:58 +00002137 __ cmp(r0, Operand(NativeRegExpMacroAssembler::FAILURE));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002138 __ b(eq, &failure);
Ben Murdoch589d6972011-11-30 16:04:58 +00002139 __ cmp(r0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002140 // If it is not an exception, it can only be a retry. Handle that in the runtime system.
2141 __ b(ne, &runtime);
2142 // The result must now be exception. If there is no pending exception already, a
2143 // stack overflow (on the backtrack stack) was detected in RegExp code, but the
2144 // exception has not been created yet. Handle that in the runtime system.
2145 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002146 __ mov(r1, Operand(isolate()->factory()->the_hole_value()));
Ben Murdoch589d6972011-11-30 16:04:58 +00002147 __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002148 isolate())));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002149 __ ldr(r0, MemOperand(r2, 0));
Ben Murdoch589d6972011-11-30 16:04:58 +00002150 __ cmp(r0, r1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002151 __ b(eq, &runtime);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002152
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002153 // For exception, throw the exception again.
2154 __ TailCallRuntime(Runtime::kRegExpExecReThrow);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002155
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002156 __ bind(&failure);
2157 // For failure and exception return null.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002158 __ mov(r0, Operand(isolate()->factory()->null_value()));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002159 __ add(sp, sp, Operand(4 * kPointerSize));
2160 __ Ret();
2161
2162 // Process the result from the native regexp code.
2163 __ bind(&success);
2164 __ ldr(r1,
2165 FieldMemOperand(regexp_data, JSRegExp::kIrregexpCaptureCountOffset));
2166 // Calculate number of capture registers (number_of_captures + 1) * 2.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002167 // Multiplying by 2 comes for free since r1 is smi-tagged.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002168 STATIC_ASSERT(kSmiTag == 0);
2169 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
2170 __ add(r1, r1, Operand(2)); // r1 was a smi.
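// Worked example for the comment above: with 32-bit smis a value n is stored
// as n << 1, so a capture count of 3 sits in r1 as 6. Adding the plain
// constant 2 yields 8, which read as an ordinary integer is exactly
// (3 + 1) * 2 capture registers; the "* 2" comes for free from the smi
// encoding.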
2171
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002172 __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset));
2173 __ JumpIfSmi(r0, &runtime);
2174 __ CompareObjectType(r0, r2, r2, JS_ARRAY_TYPE);
2175 __ b(ne, &runtime);
2176 // Check that the JSArray is in fast case.
2177 __ ldr(last_match_info_elements,
2178 FieldMemOperand(r0, JSArray::kElementsOffset));
2179 __ ldr(r0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset));
2180 __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex);
2181 __ b(ne, &runtime);
2182 // Check that the last match info has space for the capture registers and the
2183 // additional information.
2184 __ ldr(r0,
2185 FieldMemOperand(last_match_info_elements, FixedArray::kLengthOffset));
2186 __ add(r2, r1, Operand(RegExpImpl::kLastMatchOverhead));
2187 __ cmp(r2, Operand::SmiUntag(r0));
2188 __ b(gt, &runtime);
2189
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002190 // r1: number of capture registers
2191 // r4: subject string
2192 // Store the capture count.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002193 __ SmiTag(r2, r1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002194 __ str(r2, FieldMemOperand(last_match_info_elements,
2195 RegExpImpl::kLastCaptureCountOffset));
2196 // Store last subject and last input.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002197 __ str(subject,
2198 FieldMemOperand(last_match_info_elements,
2199 RegExpImpl::kLastSubjectOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002200 __ mov(r2, subject);
2201 __ RecordWriteField(last_match_info_elements,
2202 RegExpImpl::kLastSubjectOffset,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002203 subject,
2204 r3,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002205 kLRHasNotBeenSaved,
2206 kDontSaveFPRegs);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002207 __ mov(subject, r2);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002208 __ str(subject,
2209 FieldMemOperand(last_match_info_elements,
2210 RegExpImpl::kLastInputOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002211 __ RecordWriteField(last_match_info_elements,
2212 RegExpImpl::kLastInputOffset,
2213 subject,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002214 r3,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002215 kLRHasNotBeenSaved,
2216 kDontSaveFPRegs);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002217
2218 // Get the static offsets vector filled by the native regexp code.
2219 ExternalReference address_of_static_offsets_vector =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002220 ExternalReference::address_of_static_offsets_vector(isolate());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002221 __ mov(r2, Operand(address_of_static_offsets_vector));
2222
2223 // r1: number of capture registers
2224 // r2: offsets vector
2225 Label next_capture, done;
2226 // Capture register counter starts from number of capture registers and
2227 // counts down until wrapping after zero.
2228 __ add(r0,
2229 last_match_info_elements,
2230 Operand(RegExpImpl::kFirstCaptureOffset - kHeapObjectTag));
2231 __ bind(&next_capture);
2232 __ sub(r1, r1, Operand(1), SetCC);
2233 __ b(mi, &done);
2234 // Read the value from the static offsets vector buffer.
2235 __ ldr(r3, MemOperand(r2, kPointerSize, PostIndex));
2236 // Store the smi value in the last match info.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002237 __ SmiTag(r3);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002238 __ str(r3, MemOperand(r0, kPointerSize, PostIndex));
2239 __ jmp(&next_capture);
2240 __ bind(&done);
2241
2242 // Return last match info.
2243 __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset));
2244 __ add(sp, sp, Operand(4 * kPointerSize));
2245 __ Ret();
2246
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002247 // Do the runtime call to execute the regexp.
2248 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002249 __ TailCallRuntime(Runtime::kRegExpExec);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002250
2251 // Deferred code for string handling.
2252 // (6) Not a long external string? If yes, go to (8).
2253 __ bind(&not_seq_nor_cons);
2254 // Compare flags are still set.
2255 __ b(gt, &not_long_external); // Go to (8).
2256
2257 // (7) External string. Make it, offset-wise, look like a sequential string.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002258 __ bind(&external_string);
2259 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
2260 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
2261 if (FLAG_debug_code) {
2262 // Assert that we do not have a cons or slice (indirect strings) here.
2263 // Sequential strings have already been ruled out.
2264 __ tst(r0, Operand(kIsIndirectStringMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002265 __ Assert(eq, kExternalStringExpectedButNotFound);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002266 }
2267 __ ldr(subject,
2268 FieldMemOperand(subject, ExternalString::kResourceDataOffset));
2269 // Move the pointer so that offset-wise, it looks like a sequential string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002270 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002271 __ sub(subject,
2272 subject,
2273 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002274 __ jmp(&seq_string); // Go to (5).
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002275
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002276 // (8) Short external string or not a string? If yes, bail out to runtime.
2277 __ bind(&not_long_external);
2278 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
2279 __ tst(r1, Operand(kIsNotStringMask | kShortExternalStringMask));
2280 __ b(ne, &runtime);
2281
2282 // (9) Sliced string. Replace subject with parent. Go to (4).
2283 // Load offset into r9 and replace subject string with parent.
2284 __ ldr(r9, FieldMemOperand(subject, SlicedString::kOffsetOffset));
2285 __ SmiUntag(r9);
2286 __ ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset));
2287 __ jmp(&check_underlying); // Go to (4).
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002288#endif // V8_INTERPRETED_REGEXP
2289}
2290
2291
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002292static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
2293 // r0 : number of arguments to the construct function
2294 // r1 : the function to call
2295 // r2 : feedback vector
2296 // r3 : slot in feedback vector (Smi)
2297 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2298
2299 // Number-of-arguments register must be smi-tagged to call out.
2300 __ SmiTag(r0);
2301 __ Push(r3, r2, r1, r0);
2302
2303 __ CallStub(stub);
2304
2305 __ Pop(r3, r2, r1, r0);
2306 __ SmiUntag(r0);
2307}
2308
2309
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002310static void GenerateRecordCallTarget(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002311 // Cache the called function in a feedback vector slot. Cache states
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002312 // are uninitialized, monomorphic (indicated by a WeakCell holding the
2313 // JSFunction, or by an AllocationSite for the Array function), and megamorphic.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002314 // r0 : number of arguments to the construct function
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002315 // r1 : the function to call
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002316 // r2 : feedback vector
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002317 // r3 : slot in feedback vector (Smi)
2318 Label initialize, done, miss, megamorphic, not_array_function;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002319
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002320 DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
2321 masm->isolate()->heap()->megamorphic_symbol());
2322 DCHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(masm->isolate()),
2323 masm->isolate()->heap()->uninitialized_symbol());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002324
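// The slot can hold one of: the uninitialized sentinel symbol, the
// megamorphic sentinel symbol, a WeakCell whose value is the cached
// JSFunction (the cell is cleared when the function dies), or an
// AllocationSite when the cached target is the Array function. The checks
// below distinguish these cases.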
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002325 // Load the cache state into r5.
2326 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
2327 __ ldr(r5, FieldMemOperand(r5, FixedArray::kHeaderSize));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002328
2329 // A monomorphic cache hit or an already megamorphic state: invoke the
2330 // function without changing the state.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002331 // We don't know if r5 is a WeakCell or a Symbol, but it's harmless to read at
2332 // this position in a symbol (see static asserts in type-feedback-vector.h).
2333 Label check_allocation_site;
2334 Register feedback_map = r6;
2335 Register weak_value = r9;
2336 __ ldr(weak_value, FieldMemOperand(r5, WeakCell::kValueOffset));
2337 __ cmp(r1, weak_value);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002338 __ b(eq, &done);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002339 __ CompareRoot(r5, Heap::kmegamorphic_symbolRootIndex);
2340 __ b(eq, &done);
2341 __ ldr(feedback_map, FieldMemOperand(r5, HeapObject::kMapOffset));
2342 __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex);
2343 __ b(ne, &check_allocation_site);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002344
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002345 // If the weak cell is cleared, we have a new chance to become monomorphic.
2346 __ JumpIfSmi(weak_value, &initialize);
2347 __ jmp(&megamorphic);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002348
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002349 __ bind(&check_allocation_site);
2350 // If we came here, we need to see if we are the array function.
2351 // If we didn't have a matching function, and we didn't find the megamorph
2352 // sentinel, then we have in the slot either some other function or an
2353 // AllocationSite.
2354 __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex);
2355 __ b(ne, &miss);
2356
2357 // Make sure the function is the Array() function
2358 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
2359 __ cmp(r1, r5);
2360 __ b(ne, &megamorphic);
2361 __ jmp(&done);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002362
2363 __ bind(&miss);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002364
2365 // A monomorphic miss (i.e., here the cache is not uninitialized) goes
2366 // megamorphic.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002367 __ CompareRoot(r5, Heap::kuninitialized_symbolRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002368 __ b(eq, &initialize);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002369 // MegamorphicSentinel is an immortal immovable object (undefined) so no
2370 // write-barrier is needed.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002371 __ bind(&megamorphic);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002372 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002373 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002374 __ str(ip, FieldMemOperand(r5, FixedArray::kHeaderSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002375 __ jmp(&done);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002376
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002377 // An uninitialized cache is patched with a WeakCell for the function (or an AllocationSite for the Array function).
2378 __ bind(&initialize);
2379
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002380 // Make sure the function is the Array() function
2381 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
2382 __ cmp(r1, r5);
2383 __ b(ne, &not_array_function);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002384
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002385 // The target function is the Array constructor.
2386 // Create an AllocationSite if we don't already have one, and store it in the
2387 // slot.
2388 CreateAllocationSiteStub create_stub(masm->isolate());
2389 CallStubInRecordCallTarget(masm, &create_stub);
2390 __ b(&done);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002391
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002392 __ bind(&not_array_function);
2393 CreateWeakCellStub weak_cell_stub(masm->isolate());
2394 CallStubInRecordCallTarget(masm, &weak_cell_stub);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002395 __ bind(&done);
2396}
2397
2398
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002399void CallConstructStub::Generate(MacroAssembler* masm) {
2400 // r0 : number of arguments
2401 // r1 : the function to call
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002402 // r2 : feedback vector
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002403 // r3 : slot in feedback vector (Smi, for RecordCallTarget)
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002404
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002405 Label non_function;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002406 // Check that the function is not a smi.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002407 __ JumpIfSmi(r1, &non_function);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002408 // Check that the function is a JSFunction.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002409 __ CompareObjectType(r1, r5, r5, JS_FUNCTION_TYPE);
2410 __ b(ne, &non_function);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002411
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002412 GenerateRecordCallTarget(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002413
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002414 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
2415 Label feedback_register_initialized;
2416 // Put the AllocationSite from the feedback vector into r2, or undefined.
2417 __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize));
2418 __ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset));
2419 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
2420 __ b(eq, &feedback_register_initialized);
2421 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2422 __ bind(&feedback_register_initialized);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002423
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002424 __ AssertUndefinedOrAllocationSite(r2, r5);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002425
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002426 // Pass function as new target.
2427 __ mov(r3, r1);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002428
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002429 // Tail call to the function-specific construct stub (still in the caller
2430 // context at this point).
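// The construct stub is a Code object; adding (Code::kHeaderSize -
// kHeapObjectTag) to its tagged pointer gives the address of the first
// instruction, and writing that address to pc performs the tail call.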
2431 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
2432 __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset));
2433 __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002434
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002435 __ bind(&non_function);
2436 __ mov(r3, r1);
2437 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002438}
2439
2440
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002441void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002442 // r1 - function
2443 // r3 - slot id
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002444 // r2 - vector
2445 // r4 - allocation site (loaded from vector[slot])
2446 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
2447 __ cmp(r1, r5);
2448 __ b(ne, miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002449
2450 __ mov(r0, Operand(arg_count()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002451
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002452 // Increment the call count for monomorphic function calls.
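// The call count is kept as a smi in the vector entry that follows the
// feedback slot itself, hence the extra kPointerSize added to the slot
// offset below.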
2453 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
2454 __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize));
2455 __ ldr(r3, FieldMemOperand(r2, 0));
2456 __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
2457 __ str(r3, FieldMemOperand(r2, 0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002458
2459 __ mov(r2, r4);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002460 __ mov(r3, r1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002461 ArrayConstructorStub stub(masm->isolate(), arg_count());
2462 __ TailCallStub(&stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002463}
2464
2465
2466void CallICStub::Generate(MacroAssembler* masm) {
2467 // r1 - function
2468 // r3 - slot id (Smi)
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002469 // r2 - vector
2470 Label extra_checks_or_miss, call, call_function;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002471 int argc = arg_count();
2472 ParameterCount actual(argc);
2473
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002474 // The checks. First, does r1 match the recorded monomorphic target?
2475 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
2476 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002477
2478 // We don't know that we have a weak cell. We might have a private symbol
2479 // or an AllocationSite, but the memory is safe to examine.
2480 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
2481 // FixedArray.
2482 // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
2483 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
2484 // computed, meaning that it can't appear to be a pointer. If the low bit is
2485 // 0, then hash is computed, but the 0 bit prevents the field from appearing
2486 // to be a pointer.
2487 STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
2488 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
2489 WeakCell::kValueOffset &&
2490 WeakCell::kValueOffset == Symbol::kHashFieldSlot);
2491
2492 __ ldr(r5, FieldMemOperand(r4, WeakCell::kValueOffset));
2493 __ cmp(r1, r5);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002494 __ b(ne, &extra_checks_or_miss);
2495
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002496 // The compare above could have been a SMI/SMI comparison. Guard against this
2497 // convincing us that we have a monomorphic JSFunction.
2498 __ JumpIfSmi(r1, &extra_checks_or_miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002499
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002500 // Increment the call count for monomorphic function calls.
2501 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
2502 __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize));
2503 __ ldr(r3, FieldMemOperand(r2, 0));
2504 __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
2505 __ str(r3, FieldMemOperand(r2, 0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002506
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002507 __ bind(&call_function);
2508 __ mov(r0, Operand(argc));
2509 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode()),
2510 RelocInfo::CODE_TARGET);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002511
2512 __ bind(&extra_checks_or_miss);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002513 Label uninitialized, miss, not_allocation_site;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002514
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002515 __ CompareRoot(r4, Heap::kmegamorphic_symbolRootIndex);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002516 __ b(eq, &call);
2517
2518 // Verify that r4 contains an AllocationSite
2519 __ ldr(r5, FieldMemOperand(r4, HeapObject::kMapOffset));
2520 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
2521 __ b(ne, &not_allocation_site);
2522
2523 // We have an allocation site.
2524 HandleArrayCase(masm, &miss);
2525
2526 __ bind(&not_allocation_site);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002527
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002528 // The following cases attempt to handle MISS cases without going to the
2529 // runtime.
2530 if (FLAG_trace_ic) {
2531 __ jmp(&miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002532 }
2533
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002534 __ CompareRoot(r4, Heap::kuninitialized_symbolRootIndex);
2535 __ b(eq, &uninitialized);
2536
2537 // We are going megamorphic. If the feedback is a JSFunction, it is fine
2538 // to handle it here. More complex cases are dealt with in the runtime.
2539 __ AssertNotSmi(r4);
2540 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
2541 __ b(ne, &miss);
2542 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
2543 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex);
2544 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002545
2546 __ bind(&call);
2547 __ mov(r0, Operand(argc));
2548 __ Jump(masm->isolate()->builtins()->Call(convert_mode()),
2549 RelocInfo::CODE_TARGET);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002550
2551 __ bind(&uninitialized);
2552
2553 // We are going monomorphic, provided we actually have a JSFunction.
2554 __ JumpIfSmi(r1, &miss);
2555
2556 // Goto miss case if we do not have a function.
2557 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE);
2558 __ b(ne, &miss);
2559
2560 // Make sure the function is not the Array() function, which requires special
2561 // behavior on MISS.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002562 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r4);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002563 __ cmp(r1, r4);
2564 __ b(eq, &miss);
2565
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002566 // Make sure the function belongs to the same native context.
2567 __ ldr(r4, FieldMemOperand(r1, JSFunction::kContextOffset));
2568 __ ldr(r4, ContextMemOperand(r4, Context::NATIVE_CONTEXT_INDEX));
2569 __ ldr(ip, NativeContextMemOperand());
2570 __ cmp(r4, ip);
2571 __ b(ne, &miss);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002572
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002573 // Initialize the call counter.
2574 __ Move(r5, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002575 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002576 __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + kPointerSize));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002577
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002578 // Store the function. Use a stub since we need a frame for allocation.
2579 // r2 - vector
2580 // r3 - slot
2581 // r1 - function
2582 {
2583 FrameScope scope(masm, StackFrame::INTERNAL);
2584 CreateWeakCellStub create_stub(masm->isolate());
2585 __ Push(r1);
2586 __ CallStub(&create_stub);
2587 __ Pop(r1);
2588 }
2589
2590 __ jmp(&call_function);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002591
2592 // We are here because tracing is on or we encountered a MISS case we can't
2593 // handle here.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002594 __ bind(&miss);
2595 GenerateMiss(masm);
2596
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002597 __ jmp(&call);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002598}
2599
2600
2601void CallICStub::GenerateMiss(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002602 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002603
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002604 // Push the receiver and the function and feedback info.
2605 __ Push(r1, r2, r3);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002606
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002607 // Call the entry.
2608 __ CallRuntime(Runtime::kCallIC_Miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002609
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002610 // Move the result to r1 and exit the internal frame.
2611 __ mov(r1, r0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002612}
2613
2614
2615// StringCharCodeAtGenerator
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002616void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002617 // If the receiver is a smi trigger the non-string case.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002618 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
2619 __ JumpIfSmi(object_, receiver_not_string_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002620
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002621 // Fetch the instance type of the receiver into result register.
2622 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
2623 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
2624 // If the receiver is not a string trigger the non-string case.
2625 __ tst(result_, Operand(kIsNotStringMask));
2626 __ b(ne, receiver_not_string_);
2627 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002628
2629 // If the index is non-smi trigger the non-smi case.
Steve Block1e0659c2011-05-24 12:43:12 +01002630 __ JumpIfNotSmi(index_, &index_not_smi_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002631 __ bind(&got_smi_index_);
2632
2633 // Check for index out of range.
2634 __ ldr(ip, FieldMemOperand(object_, String::kLengthOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002635 __ cmp(ip, Operand(index_));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002636 __ b(ls, index_out_of_range_);
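// The unsigned 'ls' comparison above catches both index >= length and a
// negative smi index, which looks like a very large unsigned value.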
2637
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002638 __ SmiUntag(index_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002639
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002640 StringCharLoadGenerator::Generate(masm,
2641 object_,
2642 index_,
2643 result_,
2644 &call_runtime_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002645
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002646 __ SmiTag(result_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002647 __ bind(&exit_);
2648}
2649
2650
2651void StringCharCodeAtGenerator::GenerateSlow(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002652 MacroAssembler* masm, EmbedMode embed_mode,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002653 const RuntimeCallHelper& call_helper) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002654 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002655
2656 // Index is not a smi.
2657 __ bind(&index_not_smi_);
2658 // If index is a heap number, try converting it to an integer.
2659 __ CheckMap(index_,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002660 result_,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002661 Heap::kHeapNumberMapRootIndex,
2662 index_not_number_,
Ben Murdoch257744e2011-11-30 15:57:28 +00002663 DONT_DO_SMI_CHECK);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002664 call_helper.BeforeCall(masm);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002665 if (embed_mode == PART_OF_IC_HANDLER) {
2666 __ Push(LoadWithVectorDescriptor::VectorRegister(),
2667 LoadWithVectorDescriptor::SlotRegister(), object_, index_);
2668 } else {
2669 // index_ is consumed by runtime conversion function.
2670 __ Push(object_, index_);
2671 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002672 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002673 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002674 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002675 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002676 // NumberToSmi discards numbers that are not exact integers.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002677 __ CallRuntime(Runtime::kNumberToSmi);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002678 }
2679 // Save the conversion result before the pop instructions below
2680 // have a chance to overwrite it.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002681 __ Move(index_, r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002682 if (embed_mode == PART_OF_IC_HANDLER) {
2683 __ Pop(LoadWithVectorDescriptor::VectorRegister(),
2684 LoadWithVectorDescriptor::SlotRegister(), object_);
2685 } else {
2686 __ pop(object_);
2687 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002688 // Reload the instance type.
2689 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
2690 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
2691 call_helper.AfterCall(masm);
2692 // If index is still not a smi, it must be out of range.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002693 __ JumpIfNotSmi(index_, index_out_of_range_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002694 // Otherwise, return to the fast path.
2695 __ jmp(&got_smi_index_);
2696
2697 // Call runtime. We get here when the receiver is a string and the
2698 // index is a number, but the code of getting the actual character
2699 // is too complex (e.g., when the string needs to be flattened).
2700 __ bind(&call_runtime_);
2701 call_helper.BeforeCall(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002702 __ SmiTag(index_);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002703 __ Push(object_, index_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002704 __ CallRuntime(Runtime::kStringCharCodeAtRT);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002705 __ Move(result_, r0);
2706 call_helper.AfterCall(masm);
2707 __ jmp(&exit_);
2708
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002709 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002710}
2711
2712
2713// -------------------------------------------------------------------------
2714// StringCharFromCodeGenerator
2715
2716void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
2717 // Fast case of Heap::LookupSingleCharacterStringFromCode.
2718 STATIC_ASSERT(kSmiTag == 0);
2719 STATIC_ASSERT(kSmiShiftSize == 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002720 DCHECK(base::bits::IsPowerOfTwo32(String::kMaxOneByteCharCodeU + 1));
2721 __ tst(code_, Operand(kSmiTagMask |
2722 ((~String::kMaxOneByteCharCodeU) << kSmiTagSize)));
Steve Block1e0659c2011-05-24 12:43:12 +01002723 __ b(ne, &slow_case_);
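// The single tst above checks two things at once: the value must be a smi
// (low tag bit clear) and, once untagged, must not exceed
// String::kMaxOneByteCharCode; any other set bit sends us to the slow case.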
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002724
2725 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002726 // At this point code register contains smi tagged one-byte char code.
2727 __ add(result_, result_, Operand::PointerOffsetFromSmiKey(code_));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002728 __ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002729 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002730 __ b(eq, &slow_case_);
2731 __ bind(&exit_);
2732}
2733
2734
2735void StringCharFromCodeGenerator::GenerateSlow(
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002736 MacroAssembler* masm,
2737 const RuntimeCallHelper& call_helper) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002738 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002739
2740 __ bind(&slow_case_);
2741 call_helper.BeforeCall(masm);
2742 __ push(code_);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002743 __ CallRuntime(Runtime::kStringCharFromCode);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002744 __ Move(result_, r0);
2745 call_helper.AfterCall(masm);
2746 __ jmp(&exit_);
2747
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002748 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002749}
2750
2751
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002752enum CopyCharactersFlags { COPY_ONE_BYTE = 1, DEST_ALWAYS_ALIGNED = 2 };
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002753
2754
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002755void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
2756 Register dest,
2757 Register src,
2758 Register count,
2759 Register scratch,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002760 String::Encoding encoding) {
2761 if (FLAG_debug_code) {
2762 // Check that destination is word aligned.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002763 __ tst(dest, Operand(kPointerAlignmentMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002764 __ Check(eq, kDestinationOfCopyNotAligned);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002765 }
2766
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002767 // Assumes word reads and writes are little endian.
2768 // Nothing to do for zero characters.
2769 Label done;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002770 if (encoding == String::TWO_BYTE_ENCODING) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002771 __ add(count, count, Operand(count), SetCC);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002772 }
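// For two-byte strings the character count was just doubled, so from here on
// 'count' is a byte count for either encoding; the loop below copies one byte
// at a time until 'dest' reaches 'limit'.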
2773
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002774 Register limit = count; // Read until dest equals this.
2775 __ add(limit, dest, Operand(count));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002776
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002777 Label loop_entry, loop;
2778 // Copy bytes from src to dest until dest hits limit.
2779 __ b(&loop_entry);
2780 __ bind(&loop);
2781 __ ldrb(scratch, MemOperand(src, 1, PostIndex), lt);
2782 __ strb(scratch, MemOperand(dest, 1, PostIndex));
2783 __ bind(&loop_entry);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002784 __ cmp(dest, Operand(limit));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002785 __ b(lt, &loop);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002786
2787 __ bind(&done);
2788}
2789
2790
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002791void SubStringStub::Generate(MacroAssembler* masm) {
2792 Label runtime;
2793
2794 // Stack frame on entry.
2795 // lr: return address
2796 // sp[0]: to
2797 // sp[4]: from
2798 // sp[8]: string
2799
2800 // This stub is called from the native-call %_SubString(...), so
2801 // nothing can be assumed about the arguments. It is tested that:
2802 // "string" is a sequential string,
2803 // both "from" and "to" are smis, and
2804 // 0 <= from <= to <= string.length.
2805 // If any of these assumptions fail, we call the runtime system.
2806
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002807 const int kToOffset = 0 * kPointerSize;
2808 const int kFromOffset = 1 * kPointerSize;
2809 const int kStringOffset = 2 * kPointerSize;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002810
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002811 __ Ldrd(r2, r3, MemOperand(sp, kToOffset));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002812 STATIC_ASSERT(kFromOffset == kToOffset + 4);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002813 STATIC_ASSERT(kSmiTag == 0);
2814 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002815
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002816 // Arithmetic shift right by one un-smi-tags. In this case we rotate right
2817 // instead because we bail out on non-smi values: ROR and ASR are equivalent
2818 // for smis but they set the flags in a way that's easier to optimize.
2819 __ mov(r2, Operand(r2, ROR, 1), SetCC);
2820 __ mov(r3, Operand(r3, ROR, 1), SetCC, cc);
2821 // If either to or from had the smi tag bit set, then C is set now, and N
2822 // has the same value: we rotated by 1, so the bottom bit is now the top bit.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002823 // We want to bail out to the runtime here if 'from' is negative. In that case,
2824 // the next instruction is not executed and we fall through to bailing out to
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002825 // the runtime.
2826 // Executed if both r2 and r3 are untagged integers.
2827 __ sub(r2, r2, Operand(r3), SetCC, cc);
2828 // One of the above un-smis or the above SUB could have set N==1.
2829 __ b(mi, &runtime); // Either "from" or "to" is not a smi, or from > to.
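// Worked example of the ROR trick above: the smi 6 (untagged value 3) is
// 0b110; rotating right by one gives 0b011 = 3 with C = 0, so a valid smi is
// simply untagged. A heap pointer has bit 0 set, so the rotate moves that bit
// into bit 31 (and into C), making the value negative (N set), which the
// b(mi) relies on.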
Ben Murdoch85b71792012-04-11 18:30:58 +01002830
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002831 // Make sure first argument is a string.
Ben Murdoch589d6972011-11-30 16:04:58 +00002832 __ ldr(r0, MemOperand(sp, kStringOffset));
Ben Murdoch589d6972011-11-30 16:04:58 +00002833 __ JumpIfSmi(r0, &runtime);
2834 Condition is_string = masm->IsObjectStringType(r0, r1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002835 __ b(NegateCondition(is_string), &runtime);
2836
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002837 Label single_char;
2838 __ cmp(r2, Operand(1));
2839 __ b(eq, &single_char);
2840
Ben Murdoch589d6972011-11-30 16:04:58 +00002841 // Short-cut for the case of trivial substring.
2842 Label return_r0;
2843 // r0: original string
2844 // r2: result string length
2845 __ ldr(r4, FieldMemOperand(r0, String::kLengthOffset));
2846 __ cmp(r2, Operand(r4, ASR, 1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002847 // Return original string.
Ben Murdoch589d6972011-11-30 16:04:58 +00002848 __ b(eq, &return_r0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002849 // Longer than original string's length or negative: unsafe arguments.
2850 __ b(hi, &runtime);
2851 // Shorter than original string's length: an actual substring.
Ben Murdoch589d6972011-11-30 16:04:58 +00002852
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002853 // Deal with different string types: update the index if necessary
2854 // and put the underlying string into r5.
2855 // r0: original string
2856 // r1: instance type
2857 // r2: length
2858 // r3: from index (untagged)
2859 Label underlying_unpacked, sliced_string, seq_or_external_string;
2860 // If the string is not indirect, it can only be sequential or external.
2861 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
2862 STATIC_ASSERT(kIsIndirectStringMask != 0);
2863 __ tst(r1, Operand(kIsIndirectStringMask));
2864 __ b(eq, &seq_or_external_string);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002865
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002866 __ tst(r1, Operand(kSlicedNotConsMask));
2867 __ b(ne, &sliced_string);
2868 // Cons string. Check whether it is flat, then fetch first part.
2869 __ ldr(r5, FieldMemOperand(r0, ConsString::kSecondOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002870 __ CompareRoot(r5, Heap::kempty_stringRootIndex);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002871 __ b(ne, &runtime);
2872 __ ldr(r5, FieldMemOperand(r0, ConsString::kFirstOffset));
2873 // Update instance type.
2874 __ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset));
2875 __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
2876 __ jmp(&underlying_unpacked);
Ben Murdoch589d6972011-11-30 16:04:58 +00002877
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002878 __ bind(&sliced_string);
2879 // Sliced string. Fetch parent and correct start index by offset.
2880 __ ldr(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
2881 __ ldr(r4, FieldMemOperand(r0, SlicedString::kOffsetOffset));
2882 __ add(r3, r3, Operand(r4, ASR, 1)); // Add offset to index.
2883 // Update instance type.
2884 __ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset));
2885 __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
2886 __ jmp(&underlying_unpacked);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002887
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002888 __ bind(&seq_or_external_string);
2889 // Sequential or external string. Just move string to the expected register.
2890 __ mov(r5, r0);
2891
2892 __ bind(&underlying_unpacked);
2893
2894 if (FLAG_string_slices) {
2895 Label copy_routine;
2896 // r5: underlying subject string
2897 // r1: instance type of underlying subject string
2898 // r2: length
2899 // r3: adjusted start index (untagged)
2900 __ cmp(r2, Operand(SlicedString::kMinLength));
2901 // Short slice. Copy instead of slicing.
2902 __ b(lt, &copy_routine);
2903 // Allocate new sliced string. At this point we do not reload the instance
2904 // type including the string encoding because we simply rely on the info
2905 // provided by the original string. It does not matter if the original
2906 // string's encoding is wrong because we always have to recheck encoding of
2907 // the newly created string's parent anyways due to externalized strings.
2908 Label two_byte_slice, set_slice_header;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002909 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002910 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
2911 __ tst(r1, Operand(kStringEncodingMask));
2912 __ b(eq, &two_byte_slice);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002913 __ AllocateOneByteSlicedString(r0, r2, r6, r4, &runtime);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002914 __ jmp(&set_slice_header);
2915 __ bind(&two_byte_slice);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002916 __ AllocateTwoByteSlicedString(r0, r2, r6, r4, &runtime);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002917 __ bind(&set_slice_header);
2918 __ mov(r3, Operand(r3, LSL, 1));
2919 __ str(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
2920 __ str(r3, FieldMemOperand(r0, SlicedString::kOffsetOffset));
2921 __ jmp(&return_r0);
2922
2923 __ bind(&copy_routine);
2924 }
2925
2926 // r5: underlying subject string
2927 // r1: instance type of underlying subject string
2928 // r2: length
2929 // r3: adjusted start index (untagged)
2930 Label two_byte_sequential, sequential_string, allocate_result;
2931 STATIC_ASSERT(kExternalStringTag != 0);
2932 STATIC_ASSERT(kSeqStringTag == 0);
2933 __ tst(r1, Operand(kExternalStringTag));
2934 __ b(eq, &sequential_string);
2935
2936 // Handle external string.
2937 // Rule out short external strings.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002938 STATIC_ASSERT(kShortExternalStringTag != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002939 __ tst(r1, Operand(kShortExternalStringTag));
2940 __ b(ne, &runtime);
2941 __ ldr(r5, FieldMemOperand(r5, ExternalString::kResourceDataOffset));
2942 // r5 already points to the first character of underlying string.
2943 __ jmp(&allocate_result);
2944
2945 __ bind(&sequential_string);
2946 // Locate first character of underlying subject string.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002947 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
2948 __ add(r5, r5, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002949
2950 __ bind(&allocate_result);
2951 // Sequential string. Allocate the result according to its encoding.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002952 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002953 __ tst(r1, Operand(kStringEncodingMask));
2954 __ b(eq, &two_byte_sequential);
2955
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002956 // Allocate and copy the resulting one-byte string.
2957 __ AllocateOneByteString(r0, r2, r4, r6, r1, &runtime);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002958
2959 // Locate first character of substring to copy.
2960 __ add(r5, r5, r3);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002961 // Locate first character of result.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002962 __ add(r1, r0, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002963
Ben Murdoch589d6972011-11-30 16:04:58 +00002964 // r0: result string
2965 // r1: first character of result string
2966 // r2: result string length
2967 // r5: first character of substring to copy
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002968 STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
2969 StringHelper::GenerateCopyCharacters(
2970 masm, r1, r5, r2, r3, String::ONE_BYTE_ENCODING);
Ben Murdoch589d6972011-11-30 16:04:58 +00002971 __ jmp(&return_r0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002972
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002973 // Allocate and copy the resulting two-byte string.
2974 __ bind(&two_byte_sequential);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002975 __ AllocateTwoByteString(r0, r2, r4, r6, r1, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002976
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002977 // Locate first character of substring to copy.
Ben Murdoch589d6972011-11-30 16:04:58 +00002978 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002979 __ add(r5, r5, Operand(r3, LSL, 1));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002980 // Locate first character of result.
2981 __ add(r1, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch589d6972011-11-30 16:04:58 +00002982
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002983 // r0: result string.
2984 // r1: first character of result.
2985 // r2: result length.
Ben Murdoch589d6972011-11-30 16:04:58 +00002986 // r5: first character of substring to copy.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002987 STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002988 StringHelper::GenerateCopyCharacters(
2989 masm, r1, r5, r2, r3, String::TWO_BYTE_ENCODING);
Ben Murdoch589d6972011-11-30 16:04:58 +00002990
2991 __ bind(&return_r0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002992 Counters* counters = isolate()->counters();
Steve Block44f0eee2011-05-26 01:26:41 +01002993 __ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002994 __ Drop(3);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002995 __ Ret();
2996
2997 // Just jump to runtime to create the sub string.
2998 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002999 __ TailCallRuntime(Runtime::kSubString);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003000
3001 __ bind(&single_char);
3002 // r0: original string
3003 // r1: instance type
3004 // r2: length
3005 // r3: from index (untagged)
3006 __ SmiTag(r3, r3);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003007 StringCharAtGenerator generator(r0, r3, r2, r0, &runtime, &runtime, &runtime,
3008 STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003009 generator.GenerateFast(masm);
3010 __ Drop(3);
3011 __ Ret();
3012 generator.SkipSlow(masm, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003013}
3014
3015
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003016void ToNumberStub::Generate(MacroAssembler* masm) {
3017 // The ToNumber stub takes one argument in r0.
3018 Label not_smi;
3019 __ JumpIfNotSmi(r0, &not_smi);
3020 __ Ret();
3021 __ bind(&not_smi);
3022
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003023 __ CompareObjectType(r0, r1, r1, HEAP_NUMBER_TYPE);
3024 // r0: receiver
3025 // r1: receiver instance type
3026 __ Ret(eq);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003027
3028 Label not_string, slow_string;
3029 __ cmp(r1, Operand(FIRST_NONSTRING_TYPE));
3030 __ b(hs, &not_string);
3031 // Check if string has a cached array index.
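// A string that is a valid array index caches that index in its hash field.
// If none of the kContainsCachedArrayIndexMask bits are set, IndexFromHash
// extracts the cached index and returns it as a smi in r0, avoiding the
// runtime call.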
3032 __ ldr(r2, FieldMemOperand(r0, String::kHashFieldOffset));
3033 __ tst(r2, Operand(String::kContainsCachedArrayIndexMask));
3034 __ b(ne, &slow_string);
3035 __ IndexFromHash(r2, r0);
3036 __ Ret();
3037 __ bind(&slow_string);
3038 __ push(r0); // Push argument.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003039 __ TailCallRuntime(Runtime::kStringToNumber);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003040 __ bind(&not_string);
3041
3042 Label not_oddball;
3043 __ cmp(r1, Operand(ODDBALL_TYPE));
3044 __ b(ne, &not_oddball);
3045 __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset));
3046 __ Ret();
3047 __ bind(&not_oddball);
3048
3049 __ push(r0); // Push argument.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003050 __ TailCallRuntime(Runtime::kToNumber);
3051}
3052
3053
3054void ToLengthStub::Generate(MacroAssembler* masm) {
3055 // The ToLength stub takes one argument in r0.
3056 Label not_smi;
3057 __ JumpIfNotSmi(r0, &not_smi);
3058 STATIC_ASSERT(kSmiTag == 0);
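// For a smi input, ToLength only has to clamp negative values to zero below;
// a smi can never exceed the upper ToLength bound of 2^53 - 1, so
// non-negative smis are already valid lengths and are returned unchanged.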
3059 __ tst(r0, r0);
3060 __ mov(r0, Operand(0), LeaveCC, lt);
3061 __ Ret();
3062 __ bind(&not_smi);
3063
3064 __ push(r0); // Push argument.
3065 __ TailCallRuntime(Runtime::kToLength);
3066}
3067
3068
3069void ToStringStub::Generate(MacroAssembler* masm) {
3070 // The ToString stub takes one argument in r0.
3071 Label is_number;
3072 __ JumpIfSmi(r0, &is_number);
3073
3074 __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE);
3075 // r0: receiver
3076 // r1: receiver instance type
3077 __ Ret(lo);
3078
3079 Label not_heap_number;
3080 __ cmp(r1, Operand(HEAP_NUMBER_TYPE));
3081 __ b(ne, &not_heap_number);
3082 __ bind(&is_number);
3083 NumberToStringStub stub(isolate());
3084 __ TailCallStub(&stub);
3085 __ bind(&not_heap_number);
3086
3087 Label not_oddball;
3088 __ cmp(r1, Operand(ODDBALL_TYPE));
3089 __ b(ne, &not_oddball);
3090 __ ldr(r0, FieldMemOperand(r0, Oddball::kToStringOffset));
3091 __ Ret();
3092 __ bind(&not_oddball);
3093
3094 __ push(r0); // Push argument.
3095 __ TailCallRuntime(Runtime::kToString);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003096}
3097
3098
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003099void StringHelper::GenerateFlatOneByteStringEquals(
3100 MacroAssembler* masm, Register left, Register right, Register scratch1,
3101 Register scratch2, Register scratch3) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003102 Register length = scratch1;
3103
3104 // Compare lengths.
3105 Label strings_not_equal, check_zero_length;
3106 __ ldr(length, FieldMemOperand(left, String::kLengthOffset));
3107 __ ldr(scratch2, FieldMemOperand(right, String::kLengthOffset));
3108 __ cmp(length, scratch2);
3109 __ b(eq, &check_zero_length);
3110 __ bind(&strings_not_equal);
3111 __ mov(r0, Operand(Smi::FromInt(NOT_EQUAL)));
3112 __ Ret();
3113
3114 // Check if the length is zero.
3115 Label compare_chars;
3116 __ bind(&check_zero_length);
3117 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003118 __ cmp(length, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00003119 __ b(ne, &compare_chars);
3120 __ mov(r0, Operand(Smi::FromInt(EQUAL)));
3121 __ Ret();
3122
3123 // Compare characters.
3124 __ bind(&compare_chars);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003125 GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2, scratch3,
3126 &strings_not_equal);
Ben Murdoch257744e2011-11-30 15:57:28 +00003127
3128 // Characters are equal.
3129 __ mov(r0, Operand(Smi::FromInt(EQUAL)));
3130 __ Ret();
3131}
3132
3133
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003134void StringHelper::GenerateCompareFlatOneByteStrings(
3135 MacroAssembler* masm, Register left, Register right, Register scratch1,
3136 Register scratch2, Register scratch3, Register scratch4) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003137 Label result_not_equal, compare_lengths;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003138 // Find minimum length and length difference.
3139 __ ldr(scratch1, FieldMemOperand(left, String::kLengthOffset));
3140 __ ldr(scratch2, FieldMemOperand(right, String::kLengthOffset));
3141 __ sub(scratch3, scratch1, Operand(scratch2), SetCC);
3142 Register length_delta = scratch3;
3143 __ mov(scratch1, scratch2, LeaveCC, gt);
3144 Register min_length = scratch1;
3145 STATIC_ASSERT(kSmiTag == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003146 __ cmp(min_length, Operand::Zero());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003147 __ b(eq, &compare_lengths);
3148
Ben Murdoch257744e2011-11-30 15:57:28 +00003149 // Compare loop.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003150 GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
3151 scratch4, &result_not_equal);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003152
Ben Murdoch257744e2011-11-30 15:57:28 +00003153 // Compare lengths - strings up to min-length are equal.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003154 __ bind(&compare_lengths);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003155 DCHECK(Smi::FromInt(EQUAL) == static_cast<Smi*>(0));
Ben Murdoch257744e2011-11-30 15:57:28 +00003156 // Use length_delta as result if it's zero.
3157 __ mov(r0, Operand(length_delta), SetCC);
3158 __ bind(&result_not_equal);
3159 // Conditionally update the result based on either length_delta or
3160 // the last comparison performed in the loop above.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003161 __ mov(r0, Operand(Smi::FromInt(GREATER)), LeaveCC, gt);
3162 __ mov(r0, Operand(Smi::FromInt(LESS)), LeaveCC, lt);
3163 __ Ret();
3164}
3165
3166
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003167void StringHelper::GenerateOneByteCharsCompareLoop(
3168 MacroAssembler* masm, Register left, Register right, Register length,
3169 Register scratch1, Register scratch2, Label* chars_not_equal) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003170 // Change index to run from -length to -1 by adding length to string
3171 // start. This means that loop ends when index reaches zero, which
3172 // doesn't need an additional compare.
3173 __ SmiUntag(length);
3174 __ add(scratch1, length,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003175 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
Ben Murdoch257744e2011-11-30 15:57:28 +00003176 __ add(left, left, Operand(scratch1));
3177 __ add(right, right, Operand(scratch1));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003178 __ rsb(length, length, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00003179 Register index = length; // index = -length;
3180
3181 // Compare loop.
3182 Label loop;
3183 __ bind(&loop);
3184 __ ldrb(scratch1, MemOperand(left, index));
3185 __ ldrb(scratch2, MemOperand(right, index));
3186 __ cmp(scratch1, scratch2);
3187 __ b(ne, chars_not_equal);
3188 __ add(index, index, Operand(1), SetCC);
3189 __ b(ne, &loop);
3190}
3191
3192
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003193void StringCompareStub::Generate(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003194 // ----------- S t a t e -------------
3195 // -- r1 : left
3196 // -- r0 : right
3197 // -- lr : return address
3198 // -----------------------------------
3199 __ AssertString(r1);
3200 __ AssertString(r0);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003201
3202 Label not_same;
3203 __ cmp(r0, r1);
3204 __ b(ne, &not_same);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003205 __ mov(r0, Operand(Smi::FromInt(EQUAL)));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003206 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r1,
3207 r2);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003208 __ Ret();
3209
3210 __ bind(&not_same);
3211
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003212 // Check that both objects are sequential one-byte strings.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003213 Label runtime;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003214 __ JumpIfNotBothSequentialOneByteStrings(r1, r0, r2, r3, &runtime);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003215
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003216 // Compare flat one-byte strings natively.
3217 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r2,
3218 r3);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003219 StringHelper::GenerateCompareFlatOneByteStrings(masm, r1, r0, r2, r3, r4, r5);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003220
3221 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
3222 // tagged as a small integer.
3223 __ bind(&runtime);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003224 __ Push(r1, r0);
3225 __ TailCallRuntime(Runtime::kStringCompare);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003226}
3227
3228
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003229void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
3230 // ----------- S t a t e -------------
3231 // -- r1 : left
3232 // -- r0 : right
3233 // -- lr : return address
3234 // -----------------------------------
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003235
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003236 // Load r2 with the allocation site. We stick an undefined dummy value here
3237 // and replace it with the real allocation site later when we instantiate this
3238 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
3239 __ Move(r2, handle(isolate()->heap()->undefined_value()));
Steve Block44f0eee2011-05-26 01:26:41 +01003240
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003241 // Make sure that we actually patched the allocation site.
3242 if (FLAG_debug_code) {
3243 __ tst(r2, Operand(kSmiTagMask));
3244 __ Assert(ne, kExpectedAllocationSite);
3245 __ push(r2);
3246 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
3247 __ LoadRoot(ip, Heap::kAllocationSiteMapRootIndex);
3248 __ cmp(r2, ip);
3249 __ pop(r2);
3250 __ Assert(eq, kExpectedAllocationSite);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003251 }
3252
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003253 // Tail call into the stub that handles binary operations with allocation
3254 // sites.
3255 BinaryOpWithAllocationSiteStub stub(isolate(), state());
3256 __ TailCallStub(&stub);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003257}
3258
3259
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003260void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
3261 DCHECK_EQ(CompareICState::BOOLEAN, state());
3262 Label miss;
3263
3264 __ CheckMap(r1, r2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
3265 __ CheckMap(r0, r3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
3266 if (op() != Token::EQ_STRICT && is_strong(strength())) {
3267 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
3268 } else {
3269 if (!Token::IsEqualityOp(op())) {
3270 __ ldr(r1, FieldMemOperand(r1, Oddball::kToNumberOffset));
3271 __ AssertSmi(r1);
3272 __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset));
3273 __ AssertSmi(r0);
3274 }
3275 __ sub(r0, r1, r0);
3276 __ Ret();
3277 }
3278
3279 __ bind(&miss);
3280 GenerateMiss(masm);
3281}
3282
3283
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003284void CompareICStub::GenerateSmis(MacroAssembler* masm) {
3285 DCHECK(state() == CompareICState::SMI);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003286 Label miss;
3287 __ orr(r2, r1, r0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003288 __ JumpIfNotSmi(r2, &miss);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003289
3290 if (GetCondition() == eq) {
3291 // For equality we do not care about the sign of the result.
3292 __ sub(r0, r0, r1, SetCC);
3293 } else {
Steve Block1e0659c2011-05-24 12:43:12 +01003294 // Untag before subtracting to avoid handling overflow.
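    // (Both operands are 31-bit values after untagging, so the 32-bit
    // subtraction below cannot overflow and the sign of r0 is a valid
    // comparison result.)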
3295 __ SmiUntag(r1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003296 __ sub(r0, r1, Operand::SmiUntag(r0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003297 }
3298 __ Ret();
3299
3300 __ bind(&miss);
3301 GenerateMiss(masm);
3302}
3303
3304
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003305void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
3306 DCHECK(state() == CompareICState::NUMBER);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003307
3308 Label generic_stub;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003309 Label unordered, maybe_undefined1, maybe_undefined2;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003310 Label miss;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003311
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003312 if (left() == CompareICState::SMI) {
3313 __ JumpIfNotSmi(r1, &miss);
3314 }
3315 if (right() == CompareICState::SMI) {
3316 __ JumpIfNotSmi(r0, &miss);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003317 }
3318
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003319 // Inlining the double comparison and falling back to the general compare
3320 // stub if NaN is involved.
3321 // Load left and right operand.
3322 Label done, left, left_smi, right_smi;
3323 __ JumpIfSmi(r0, &right_smi);
3324 __ CheckMap(r0, r2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1,
3325 DONT_DO_SMI_CHECK);
3326 __ sub(r2, r0, Operand(kHeapObjectTag));
3327 __ vldr(d1, r2, HeapNumber::kValueOffset);
3328 __ b(&left);
3329 __ bind(&right_smi);
3330 __ SmiToDouble(d1, r0);
3331
3332 __ bind(&left);
3333 __ JumpIfSmi(r1, &left_smi);
3334 __ CheckMap(r1, r2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2,
3335 DONT_DO_SMI_CHECK);
3336 __ sub(r2, r1, Operand(kHeapObjectTag));
3337 __ vldr(d0, r2, HeapNumber::kValueOffset);
3338 __ b(&done);
3339 __ bind(&left_smi);
3340 __ SmiToDouble(d0, r1);
3341
3342 __ bind(&done);
3343 // Compare operands.
3344 __ VFPCompareAndSetFlags(d0, d1);
3345
3346 // Don't base result on status bits when a NaN is involved.
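  // (VFPCompareAndSetFlags leaves the V flag set for an unordered compare,
  // i.e. when at least one operand is a NaN, so vs selects the slow path.)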
3347 __ b(vs, &unordered);
3348
3349 // Return a result of -1, 0, or 1, based on status bits.
3350 __ mov(r0, Operand(EQUAL), LeaveCC, eq);
3351 __ mov(r0, Operand(LESS), LeaveCC, lt);
3352 __ mov(r0, Operand(GREATER), LeaveCC, gt);
3353 __ Ret();
3354
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003355 __ bind(&unordered);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003356 __ bind(&generic_stub);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003357 CompareICStub stub(isolate(), op(), strength(), CompareICState::GENERIC,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003358 CompareICState::GENERIC, CompareICState::GENERIC);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003359 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
3360
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003361 __ bind(&maybe_undefined1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003362 if (Token::IsOrderedRelationalCompareOp(op())) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003363 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
3364 __ b(ne, &miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003365 __ JumpIfSmi(r1, &unordered);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003366 __ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE);
3367 __ b(ne, &maybe_undefined2);
3368 __ jmp(&unordered);
3369 }
3370
3371 __ bind(&maybe_undefined2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003372 if (Token::IsOrderedRelationalCompareOp(op())) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003373 __ CompareRoot(r1, Heap::kUndefinedValueRootIndex);
3374 __ b(eq, &unordered);
3375 }
3376
Ben Murdochb0fe1622011-05-05 13:52:32 +01003377 __ bind(&miss);
3378 GenerateMiss(masm);
3379}
3380
3381
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003382void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
3383 DCHECK(state() == CompareICState::INTERNALIZED_STRING);
Ben Murdoch257744e2011-11-30 15:57:28 +00003384 Label miss;
3385
3386 // Registers containing left and right operands respectively.
3387 Register left = r1;
3388 Register right = r0;
3389 Register tmp1 = r2;
3390 Register tmp2 = r3;
3391
3392 // Check that both operands are heap objects.
3393 __ JumpIfEitherSmi(left, right, &miss);
3394
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003395 // Check that both operands are internalized strings.
Ben Murdoch257744e2011-11-30 15:57:28 +00003396 __ ldr(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
3397 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3398 __ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
3399 __ ldrb(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003400 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
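  // Since both the string tag and the internalized tag are zero, or-ing the
  // two instance types and testing the combined mask checks both operands
  // with a single tst: any set bit means one of them is not a string or not
  // internalized.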
3401 __ orr(tmp1, tmp1, Operand(tmp2));
3402 __ tst(tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask));
3403 __ b(ne, &miss);
Ben Murdoch257744e2011-11-30 15:57:28 +00003404
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003405 // Internalized strings are compared by identity.
Ben Murdoch257744e2011-11-30 15:57:28 +00003406 __ cmp(left, right);
3407 // Make sure r0 is non-zero. At this point input operands are
3408 // guaranteed to be non-zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003409 DCHECK(right.is(r0));
Ben Murdoch257744e2011-11-30 15:57:28 +00003410 STATIC_ASSERT(EQUAL == 0);
3411 STATIC_ASSERT(kSmiTag == 0);
3412 __ mov(r0, Operand(Smi::FromInt(EQUAL)), LeaveCC, eq);
3413 __ Ret();
3414
3415 __ bind(&miss);
3416 GenerateMiss(masm);
3417}
3418
3419
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003420void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
3421 DCHECK(state() == CompareICState::UNIQUE_NAME);
3422 DCHECK(GetCondition() == eq);
Ben Murdoch257744e2011-11-30 15:57:28 +00003423 Label miss;
3424
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003425 // Registers containing left and right operands respectively.
3426 Register left = r1;
3427 Register right = r0;
3428 Register tmp1 = r2;
3429 Register tmp2 = r3;
3430
3431 // Check that both operands are heap objects.
3432 __ JumpIfEitherSmi(left, right, &miss);
3433
3434 // Check that both operands are unique names. This leaves the instance
3435 // types loaded in tmp1 and tmp2.
3436 __ ldr(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
3437 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3438 __ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
3439 __ ldrb(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
3440
3441 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss);
3442 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss);
3443
3444 // Unique names are compared by identity.
3445 __ cmp(left, right);
3446 // Make sure r0 is non-zero. At this point input operands are
3447 // guaranteed to be non-zero.
3448 DCHECK(right.is(r0));
3449 STATIC_ASSERT(EQUAL == 0);
3450 STATIC_ASSERT(kSmiTag == 0);
3451 __ mov(r0, Operand(Smi::FromInt(EQUAL)), LeaveCC, eq);
3452 __ Ret();
3453
3454 __ bind(&miss);
3455 GenerateMiss(masm);
3456}
3457
3458
3459void CompareICStub::GenerateStrings(MacroAssembler* masm) {
3460 DCHECK(state() == CompareICState::STRING);
3461 Label miss;
3462
3463 bool equality = Token::IsEqualityOp(op());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003464
Ben Murdoch257744e2011-11-30 15:57:28 +00003465 // Registers containing left and right operands respectively.
3466 Register left = r1;
3467 Register right = r0;
3468 Register tmp1 = r2;
3469 Register tmp2 = r3;
3470 Register tmp3 = r4;
3471 Register tmp4 = r5;
3472
3473 // Check that both operands are heap objects.
3474 __ JumpIfEitherSmi(left, right, &miss);
3475
3476 // Check that both operands are strings. This leaves the instance
3477 // types loaded in tmp1 and tmp2.
3478 __ ldr(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
3479 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3480 __ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
3481 __ ldrb(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
3482 STATIC_ASSERT(kNotStringTag != 0);
3483 __ orr(tmp3, tmp1, tmp2);
3484 __ tst(tmp3, Operand(kIsNotStringMask));
3485 __ b(ne, &miss);
3486
3487 // Fast check for identical strings.
3488 __ cmp(left, right);
3489 STATIC_ASSERT(EQUAL == 0);
3490 STATIC_ASSERT(kSmiTag == 0);
3491 __ mov(r0, Operand(Smi::FromInt(EQUAL)), LeaveCC, eq);
3492 __ Ret(eq);
3493
3494 // Handle not identical strings.
3495
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003496 // Check that both strings are internalized strings. If they are, we're done
3497 // because we already know they are not identical. We know they are both
3498 // strings.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003499 if (equality) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003500 DCHECK(GetCondition() == eq);
3501 STATIC_ASSERT(kInternalizedTag == 0);
3502 __ orr(tmp3, tmp1, Operand(tmp2));
3503 __ tst(tmp3, Operand(kIsNotInternalizedMask));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003504 // Make sure r0 is non-zero. At this point input operands are
3505 // guaranteed to be non-zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003506 DCHECK(right.is(r0));
3507 __ Ret(eq);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003508 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003509
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003510 // Check that both strings are sequential one-byte.
Ben Murdoch257744e2011-11-30 15:57:28 +00003511 Label runtime;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003512 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(tmp1, tmp2, tmp3, tmp4,
3513 &runtime);
Ben Murdoch257744e2011-11-30 15:57:28 +00003514
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003515 // Compare flat one-byte strings. Returns when done.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003516 if (equality) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003517 StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1, tmp2,
3518 tmp3);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003519 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003520 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
3521 tmp2, tmp3, tmp4);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003522 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003523
3524 // Handle more complex cases in runtime.
3525 __ bind(&runtime);
3526 __ Push(left, right);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003527 if (equality) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003528 __ TailCallRuntime(Runtime::kStringEquals);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003529 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003530 __ TailCallRuntime(Runtime::kStringCompare);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003531 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003532
3533 __ bind(&miss);
3534 GenerateMiss(masm);
3535}
3536
3537
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003538void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
3539 DCHECK_EQ(CompareICState::RECEIVER, state());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003540 Label miss;
3541 __ and_(r2, r1, Operand(r0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003542 __ JumpIfSmi(r2, &miss);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003543
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003544 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
3545 __ CompareObjectType(r0, r2, r2, FIRST_JS_RECEIVER_TYPE);
3546 __ b(lt, &miss);
3547 __ CompareObjectType(r1, r2, r2, FIRST_JS_RECEIVER_TYPE);
3548 __ b(lt, &miss);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003549
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003550 DCHECK(GetCondition() == eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003551 __ sub(r0, r0, Operand(r1));
3552 __ Ret();
3553
3554 __ bind(&miss);
3555 GenerateMiss(masm);
3556}
3557
3558
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003559void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003560 Label miss;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003561 Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003562 __ and_(r2, r1, Operand(r0));
3563 __ JumpIfSmi(r2, &miss);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003564 __ GetWeakValue(r4, cell);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003565 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
3566 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003567 __ cmp(r2, r4);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003568 __ b(ne, &miss);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003569 __ cmp(r3, r4);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003570 __ b(ne, &miss);
Ben Murdochc7cc0282012-03-05 14:35:55 +00003571
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003572 if (Token::IsEqualityOp(op())) {
3573 __ sub(r0, r0, Operand(r1));
3574 __ Ret();
3575 } else if (is_strong(strength())) {
3576 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
3577 } else {
3578 if (op() == Token::LT || op() == Token::LTE) {
3579 __ mov(r2, Operand(Smi::FromInt(GREATER)));
3580 } else {
3581 __ mov(r2, Operand(Smi::FromInt(LESS)));
3582 }
3583 __ Push(r1, r0, r2);
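    // The third value pushed is the hint Runtime::kCompare uses when the
    // operands cannot be ordered (e.g. a NaN is involved): GREATER for
    // LT/LTE and LESS for GT/GTE, so the comparison comes out false either
    // way.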
3584 __ TailCallRuntime(Runtime::kCompare);
3585 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003586
3587 __ bind(&miss);
3588 GenerateMiss(masm);
3589}
3590
3591
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003592void CompareICStub::GenerateMiss(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003593 {
3594 // Call the runtime system in a fresh internal frame.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003595 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003596 __ Push(r1, r0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003597 __ Push(lr, r1, r0);
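    // The operands are pushed twice: the outer pair is preserved across the
    // runtime call and restored below, while lr is saved and the inner
    // r1/r0 copy plus the op smi become the arguments to CompareIC_Miss.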
3598 __ mov(ip, Operand(Smi::FromInt(op())));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003599 __ push(ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003600 __ CallRuntime(Runtime::kCompareIC_Miss);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003601 // Compute the entry point of the rewritten stub.
3602 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
3603 // Restore registers.
3604 __ pop(lr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003605 __ Pop(r1, r0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003606 }
3607
Ben Murdochb0fe1622011-05-05 13:52:32 +01003608 __ Jump(r2);
3609}
3610
3611
Steve Block1e0659c2011-05-24 12:43:12 +01003612void DirectCEntryStub::Generate(MacroAssembler* masm) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003613 // Place the return address on the stack, making the call
3614 // GC safe. The RegExp backend also relies on this.
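  // If a GC moves this code object while the C++ call is running, the GC
  // updates the return address slot at sp, so reloading pc from the stack
  // below returns into the relocated code rather than to a stale address.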
3615 __ str(lr, MemOperand(sp, 0));
3616 __ blx(ip); // Call the C++ function.
3617 __ VFPEnsureFPSCRState(r2);
Steve Block1e0659c2011-05-24 12:43:12 +01003618 __ ldr(pc, MemOperand(sp, 0));
3619}
3620
3621
3622void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003623 Register target) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003624 intptr_t code =
3625 reinterpret_cast<intptr_t>(GetCode().location());
3626 __ Move(ip, target);
3627 __ mov(lr, Operand(code, RelocInfo::CODE_TARGET));
3628 __ blx(lr); // Call the stub.
Steve Block1e0659c2011-05-24 12:43:12 +01003629}
3630
3631
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003632void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
3633 Label* miss,
3634 Label* done,
3635 Register receiver,
3636 Register properties,
3637 Handle<Name> name,
3638 Register scratch0) {
3639 DCHECK(name->IsUniqueName());
Ben Murdoch257744e2011-11-30 15:57:28 +00003640 // If the names of the slots in the range from 1 to kProbes - 1 for the
3641 // hash value are not equal to the name, and the kProbes-th slot is unused
3642 // (its name is the undefined value), the hash table is guaranteed not to
3643 // contain the property. This holds even if some slots represent deleted
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003644 // properties (their names are the hole value).
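  // Roughly, each inlined probe does the following (an illustrative sketch;
  // the helper names are approximations, not the actual V8 API):
  //
  //   index = (name->hash() + probe_offset(i)) & (capacity - 1);
  //   entry_name = properties[kElementsStartIndex + index * kEntrySize];
  //   if (entry_name == undefined) goto done;  // name cannot be present
  //   if (entry_name == *name) goto miss;      // name is present
  //   // the hole and other unique names keep probing; anything else misses
  //
  // After kInlinedProbes probes without an answer, the stub call below does
  // the full lookup.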
Ben Murdoch257744e2011-11-30 15:57:28 +00003645 for (int i = 0; i < kInlinedProbes; i++) {
3646 // scratch0 points to properties hash.
3647 // Compute the masked index: (hash + i + i * i) & mask.
3648 Register index = scratch0;
3649 // Capacity is smi 2^n.
3650 __ ldr(index, FieldMemOperand(properties, kCapacityOffset));
3651 __ sub(index, index, Operand(1));
3652 __ and_(index, index, Operand(
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003653 Smi::FromInt(name->Hash() + NameDictionary::GetProbeOffset(i))));
Ben Murdoch257744e2011-11-30 15:57:28 +00003654
3655 // Scale the index by multiplying by the entry size.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003656 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
Ben Murdoch257744e2011-11-30 15:57:28 +00003657 __ add(index, index, Operand(index, LSL, 1)); // index *= 3.
3658
3659 Register entity_name = scratch0;
3660 // Having undefined at this place means the name is not contained.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003661 STATIC_ASSERT(kSmiTagSize == 1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003662 Register tmp = properties;
3663 __ add(tmp, properties, Operand(index, LSL, 1));
3664 __ ldr(entity_name, FieldMemOperand(tmp, kElementsStartOffset));
3665
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003666 DCHECK(!tmp.is(entity_name));
Ben Murdoch257744e2011-11-30 15:57:28 +00003667 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
3668 __ cmp(entity_name, tmp);
3669 __ b(eq, done);
3670
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003671 // Load the hole ready for use below:
3672 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003673
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003674 // Stop if we found the property.
3675 __ cmp(entity_name, Operand(Handle<Name>(name)));
3676 __ b(eq, miss);
Ben Murdoch257744e2011-11-30 15:57:28 +00003677
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003678 Label good;
3679 __ cmp(entity_name, tmp);
3680 __ b(eq, &good);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003681
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003682 // Check if the entry name is not a unique name.
3683 __ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
3684 __ ldrb(entity_name,
3685 FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
3686 __ JumpIfNotUniqueNameInstanceType(entity_name, miss);
3687 __ bind(&good);
Ben Murdoch257744e2011-11-30 15:57:28 +00003688
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003689 // Restore the properties.
3690 __ ldr(properties,
3691 FieldMemOperand(receiver, JSObject::kPropertiesOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003692 }
3693
3694 const int spill_mask =
3695 (lr.bit() | r6.bit() | r5.bit() | r4.bit() | r3.bit() |
3696 r2.bit() | r1.bit() | r0.bit());
3697
3698 __ stm(db_w, sp, spill_mask);
3699 __ ldr(r0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003700 __ mov(r1, Operand(Handle<Name>(name)));
3701 NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003702 __ CallStub(&stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003703 __ cmp(r0, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00003704 __ ldm(ia_w, sp, spill_mask);
3705
3706 __ b(eq, done);
3707 __ b(ne, miss);
Ben Murdoch257744e2011-11-30 15:57:28 +00003708}
3709
3710
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003711// Probe the name dictionary in the |elements| register. Jump to the
Ben Murdoch257744e2011-11-30 15:57:28 +00003712// |done| label if a property with the given name is found. Jump to
3713// the |miss| label otherwise.
3714// If lookup was successful |scratch2| will be equal to elements + 4 * index.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003715void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
3716 Label* miss,
3717 Label* done,
3718 Register elements,
3719 Register name,
3720 Register scratch1,
3721 Register scratch2) {
3722 DCHECK(!elements.is(scratch1));
3723 DCHECK(!elements.is(scratch2));
3724 DCHECK(!name.is(scratch1));
3725 DCHECK(!name.is(scratch2));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003726
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003727 __ AssertName(name);
Ben Murdoch257744e2011-11-30 15:57:28 +00003728
3729 // Compute the capacity mask.
3730 __ ldr(scratch1, FieldMemOperand(elements, kCapacityOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003731 __ SmiUntag(scratch1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003732 __ sub(scratch1, scratch1, Operand(1));
3733
3734 // Generate an unrolled loop that performs a few probes before
3735 // giving up. Measurements done on Gmail indicate that 2 probes
3736 // cover ~93% of loads from dictionaries.
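  // Each probe computes, in effect (an illustrative sketch, not the actual
  // V8 API):
  //
  //   index = (name->hash() + probe_offset(i)) & (capacity - 1);
  //   if (elements[kElementsStartIndex + index * kEntrySize] == name)
  //     goto done;
  //
  // and after kInlinedProbes unsuccessful probes the full lookup stub is
  // called below.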
3737 for (int i = 0; i < kInlinedProbes; i++) {
3738 // Compute the masked index: (hash + i + i * i) & mask.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003739 __ ldr(scratch2, FieldMemOperand(name, Name::kHashFieldOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003740 if (i > 0) {
3741 // Add the probe offset (i + i * i) left shifted to avoid right shifting
3742 // the hash in a separate instruction. The value hash + i + i * i is right
3743 // shifted in the following and instruction.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003744 DCHECK(NameDictionary::GetProbeOffset(i) <
3745 1 << (32 - Name::kHashFieldOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003746 __ add(scratch2, scratch2, Operand(
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003747 NameDictionary::GetProbeOffset(i) << Name::kHashShift));
Ben Murdoch257744e2011-11-30 15:57:28 +00003748 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003749 __ and_(scratch2, scratch1, Operand(scratch2, LSR, Name::kHashShift));
Ben Murdoch257744e2011-11-30 15:57:28 +00003750
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003751 // Scale the index by multiplying by the entry size.
3752 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
Ben Murdoch257744e2011-11-30 15:57:28 +00003753 // scratch2 = scratch2 * 3.
3754 __ add(scratch2, scratch2, Operand(scratch2, LSL, 1));
3755
3756 // Check if the key is identical to the name.
3757 __ add(scratch2, elements, Operand(scratch2, LSL, 2));
3758 __ ldr(ip, FieldMemOperand(scratch2, kElementsStartOffset));
3759 __ cmp(name, Operand(ip));
3760 __ b(eq, done);
3761 }
3762
3763 const int spill_mask =
3764 (lr.bit() | r6.bit() | r5.bit() | r4.bit() |
3765 r3.bit() | r2.bit() | r1.bit() | r0.bit()) &
3766 ~(scratch1.bit() | scratch2.bit());
3767
3768 __ stm(db_w, sp, spill_mask);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003769 if (name.is(r0)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003770 DCHECK(!elements.is(r1));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003771 __ Move(r1, name);
3772 __ Move(r0, elements);
3773 } else {
3774 __ Move(r0, elements);
3775 __ Move(r1, name);
3776 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003777 NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP);
Ben Murdoch257744e2011-11-30 15:57:28 +00003778 __ CallStub(&stub);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003779 __ cmp(r0, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00003780 __ mov(scratch2, Operand(r2));
3781 __ ldm(ia_w, sp, spill_mask);
3782
3783 __ b(ne, done);
3784 __ b(eq, miss);
3785}
3786
3787
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003788void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003789 // This stub overrides SometimesSetsUpAFrame() to return false. That means
3790 // we cannot call anything that could cause a GC from this stub.
Ben Murdoch257744e2011-11-30 15:57:28 +00003791 // Registers:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003792 // result: NameDictionary to probe.
Ben Murdoch257744e2011-11-30 15:57:28 +00003793 // r1: key.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003794 // dictionary: NameDictionary to probe.
3795 // index: will hold the index of the entry if the lookup is successful;
3796 // it might alias with result.
Ben Murdoch257744e2011-11-30 15:57:28 +00003797 // Returns:
3798 // result is zero if the lookup failed, non-zero otherwise.
3799
3800 Register result = r0;
3801 Register dictionary = r0;
3802 Register key = r1;
3803 Register index = r2;
3804 Register mask = r3;
3805 Register hash = r4;
3806 Register undefined = r5;
3807 Register entry_key = r6;
3808
3809 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
3810
3811 __ ldr(mask, FieldMemOperand(dictionary, kCapacityOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003812 __ SmiUntag(mask);
Ben Murdoch257744e2011-11-30 15:57:28 +00003813 __ sub(mask, mask, Operand(1));
3814
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003815 __ ldr(hash, FieldMemOperand(key, Name::kHashFieldOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003816
3817 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
3818
3819 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
3820 // Compute the masked index: (hash + i + i * i) & mask.
3821 // Capacity is smi 2^n.
3822 if (i > 0) {
3823 // Add the probe offset (i + i * i) left shifted to avoid right shifting
3824 // the hash in a separate instruction. The value hash + i + i * i is right
3825 // shifted in the following and instruction.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003826 DCHECK(NameDictionary::GetProbeOffset(i) <
3827 1 << (32 - Name::kHashFieldOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003828 __ add(index, hash, Operand(
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003829 NameDictionary::GetProbeOffset(i) << Name::kHashShift));
Ben Murdoch257744e2011-11-30 15:57:28 +00003830 } else {
3831 __ mov(index, Operand(hash));
3832 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003833 __ and_(index, mask, Operand(index, LSR, Name::kHashShift));
Ben Murdoch257744e2011-11-30 15:57:28 +00003834
3835 // Scale the index by multiplying by the entry size.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003836 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
Ben Murdoch257744e2011-11-30 15:57:28 +00003837 __ add(index, index, Operand(index, LSL, 1)); // index *= 3.
3838
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003839 STATIC_ASSERT(kSmiTagSize == 1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003840 __ add(index, dictionary, Operand(index, LSL, 2));
3841 __ ldr(entry_key, FieldMemOperand(index, kElementsStartOffset));
3842
3843 // Having undefined at this place means the name is not contained.
3844 __ cmp(entry_key, Operand(undefined));
3845 __ b(eq, &not_in_dictionary);
3846
3847 // Stop if we found the property.
3848 __ cmp(entry_key, Operand(key));
3849 __ b(eq, &in_dictionary);
3850
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003851 if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
3852 // Check if the entry name is not a unique name.
Ben Murdoch257744e2011-11-30 15:57:28 +00003853 __ ldr(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset));
3854 __ ldrb(entry_key,
3855 FieldMemOperand(entry_key, Map::kInstanceTypeOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003856 __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary);
Ben Murdoch257744e2011-11-30 15:57:28 +00003857 }
3858 }
3859
3860 __ bind(&maybe_in_dictionary);
3861 // If we are doing negative lookup then probing failure should be
3862 // treated as a lookup success. For positive lookup probing failure
3863 // should be treated as lookup failure.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003864 if (mode() == POSITIVE_LOOKUP) {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003865 __ mov(result, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00003866 __ Ret();
3867 }
3868
3869 __ bind(&in_dictionary);
3870 __ mov(result, Operand(1));
3871 __ Ret();
3872
3873 __ bind(&not_in_dictionary);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003874 __ mov(result, Operand::Zero());
Ben Murdoch257744e2011-11-30 15:57:28 +00003875 __ Ret();
3876}
3877
3878
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003879void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
3880 Isolate* isolate) {
3881 StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
3882 stub1.GetCode();
3883 // Hydrogen code stubs need stub2 at snapshot time.
3884 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
3885 stub2.GetCode();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003886}
3887
3888
3889// Takes the input in 3 registers: address_, value_ and object_. A pointer to
3890// the value has just been written into the object; now this stub makes sure
3891// we keep the GC informed. The word in the object where the value has been
3892// written is in the address register.
3893void RecordWriteStub::Generate(MacroAssembler* masm) {
3894 Label skip_to_incremental_noncompacting;
3895 Label skip_to_incremental_compacting;
3896
3897 // The first two instructions are generated with labels so as to get the
3898 // offset fixed up correctly by the bind(Label*) call. We patch it back and
3899// forth between a compare instruction (a nop in this position) and the
3900 // real branch when we start and stop incremental heap marking.
3901 // See RecordWriteStub::Patch for details.
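  // The stub therefore has three states: initially both branches are
  // patched into nops (STORE_BUFFER_ONLY) and the store-buffer code below
  // runs; to activate INCREMENTAL or INCREMENTAL_COMPACTION marking, the
  // first or second instruction respectively is patched back into a branch.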
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003902 {
3903 // Block literal pool emission, as the position of these two instructions
3904 // is assumed by the patching code.
3905 Assembler::BlockConstPoolScope block_const_pool(masm);
3906 __ b(&skip_to_incremental_noncompacting);
3907 __ b(&skip_to_incremental_compacting);
3908 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003909
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003910 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
3911 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003912 MacroAssembler::kReturnAtEnd);
3913 }
3914 __ Ret();
3915
3916 __ bind(&skip_to_incremental_noncompacting);
3917 GenerateIncremental(masm, INCREMENTAL);
3918
3919 __ bind(&skip_to_incremental_compacting);
3920 GenerateIncremental(masm, INCREMENTAL_COMPACTION);
3921
3922 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
3923 // Will be checked in IncrementalMarking::ActivateGeneratedStub.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003924 DCHECK(Assembler::GetBranchOffset(masm->instr_at(0)) < (1 << 12));
3925 DCHECK(Assembler::GetBranchOffset(masm->instr_at(4)) < (1 << 12));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003926 PatchBranchIntoNop(masm, 0);
3927 PatchBranchIntoNop(masm, Assembler::kInstrSize);
3928}
3929
3930
3931void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
3932 regs_.Save(masm);
3933
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003934 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003935 Label dont_need_remembered_set;
3936
3937 __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0));
3938 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value.
3939 regs_.scratch0(),
3940 &dont_need_remembered_set);
3941
3942 __ CheckPageFlag(regs_.object(),
3943 regs_.scratch0(),
3944 1 << MemoryChunk::SCAN_ON_SCAVENGE,
3945 ne,
3946 &dont_need_remembered_set);
3947
3948 // First notify the incremental marker if necessary, then update the
3949 // remembered set.
3950 CheckNeedsToInformIncrementalMarker(
3951 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003952 InformIncrementalMarker(masm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003953 regs_.Restore(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003954 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003955 MacroAssembler::kReturnAtEnd);
3956
3957 __ bind(&dont_need_remembered_set);
3958 }
3959
3960 CheckNeedsToInformIncrementalMarker(
3961 masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003962 InformIncrementalMarker(masm);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003963 regs_.Restore(masm);
3964 __ Ret();
3965}
3966
3967
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003968void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
3969 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003970 int argument_count = 3;
3971 __ PrepareCallCFunction(argument_count, regs_.scratch0());
3972 Register address =
3973 r0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003974 DCHECK(!address.is(regs_.object()));
3975 DCHECK(!address.is(r0));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003976 __ Move(address, regs_.address());
3977 __ Move(r0, regs_.object());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003978 __ Move(r1, address);
3979 __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003980
3981 AllowExternalCallThatCantCauseGC scope(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003982 __ CallCFunction(
3983 ExternalReference::incremental_marking_record_write_function(isolate()),
3984 argument_count);
3985 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003986}
3987
3988
3989void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
3990 MacroAssembler* masm,
3991 OnNoNeedToInformIncrementalMarker on_no_need,
3992 Mode mode) {
3993 Label on_black;
3994 Label need_incremental;
3995 Label need_incremental_pop_scratch;
3996
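  // Mask the object address down to its MemoryChunk header and decrement
  // the page's write barrier counter; if the counter goes negative, jump
  // straight to the slow path that informs the incremental marker.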
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003997 __ and_(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask));
3998 __ ldr(regs_.scratch1(),
3999 MemOperand(regs_.scratch0(),
4000 MemoryChunk::kWriteBarrierCounterOffset));
4001 __ sub(regs_.scratch1(), regs_.scratch1(), Operand(1), SetCC);
4002 __ str(regs_.scratch1(),
4003 MemOperand(regs_.scratch0(),
4004 MemoryChunk::kWriteBarrierCounterOffset));
4005 __ b(mi, &need_incremental);
4006
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004007 // Let's look at the color of the object: If it is not black we don't have
4008 // to inform the incremental marker.
4009 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
4010
4011 regs_.Restore(masm);
4012 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004013 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004014 MacroAssembler::kReturnAtEnd);
4015 } else {
4016 __ Ret();
4017 }
4018
4019 __ bind(&on_black);
4020
4021 // Get the value from the slot.
4022 __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0));
4023
4024 if (mode == INCREMENTAL_COMPACTION) {
4025 Label ensure_not_white;
4026
4027 __ CheckPageFlag(regs_.scratch0(), // Contains value.
4028 regs_.scratch1(), // Scratch.
4029 MemoryChunk::kEvacuationCandidateMask,
4030 eq,
4031 &ensure_not_white);
4032
4033 __ CheckPageFlag(regs_.object(),
4034 regs_.scratch1(), // Scratch.
4035 MemoryChunk::kSkipEvacuationSlotsRecordingMask,
4036 eq,
4037 &need_incremental);
4038
4039 __ bind(&ensure_not_white);
4040 }
4041
4042 // We need extra registers for this, so we push the object and the address
4043 // register temporarily.
4044 __ Push(regs_.object(), regs_.address());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004045 __ JumpIfWhite(regs_.scratch0(), // The value.
4046 regs_.scratch1(), // Scratch.
4047 regs_.object(), // Scratch.
4048 regs_.address(), // Scratch.
4049 &need_incremental_pop_scratch);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004050 __ Pop(regs_.object(), regs_.address());
4051
4052 regs_.Restore(masm);
4053 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004054 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004055 MacroAssembler::kReturnAtEnd);
4056 } else {
4057 __ Ret();
4058 }
4059
4060 __ bind(&need_incremental_pop_scratch);
4061 __ Pop(regs_.object(), regs_.address());
4062
4063 __ bind(&need_incremental);
4064
4065 // Fall through when we need to inform the incremental marker.
4066}
4067
4068
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004069void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
4070 CEntryStub ces(isolate(), 1, kSaveFPRegs);
4071 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
4072 int parameter_count_offset =
4073 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
4074 __ ldr(r1, MemOperand(fp, parameter_count_offset));
4075 if (function_mode() == JS_FUNCTION_STUB_MODE) {
4076 __ add(r1, r1, Operand(1));
4077 }
4078 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
4079 __ mov(r1, Operand(r1, LSL, kPointerSizeLog2));
4080 __ add(sp, sp, r1);
4081 __ Ret();
4082}
4083
4084
4085void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004086 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
4087 LoadICStub stub(isolate(), state());
4088 stub.GenerateForTrampoline(masm);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004089}
4090
4091
4092void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004093 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
4094 KeyedLoadICStub stub(isolate(), state());
4095 stub.GenerateForTrampoline(masm);
4096}
4097
4098
4099void CallICTrampolineStub::Generate(MacroAssembler* masm) {
4100 __ EmitLoadTypeFeedbackVector(r2);
4101 CallICStub stub(isolate(), state());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004102 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
4103}
4104
4105
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004106void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
4107
4108
4109void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
4110 GenerateImpl(masm, true);
4111}
4112
4113
4114static void HandleArrayCases(MacroAssembler* masm, Register feedback,
4115 Register receiver_map, Register scratch1,
4116 Register scratch2, bool is_polymorphic,
4117 Label* miss) {
4118 // feedback initially contains the feedback array
4119 Label next_loop, prepare_next;
4120 Label start_polymorphic;
4121
4122 Register cached_map = scratch1;
4123
4124 __ ldr(cached_map,
4125 FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(0)));
4126 __ ldr(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
4127 __ cmp(receiver_map, cached_map);
4128 __ b(ne, &start_polymorphic);
4129 // found, now call handler.
4130 Register handler = feedback;
4131 __ ldr(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1)));
4132 __ add(pc, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
4133
4134
4135 Register length = scratch2;
4136 __ bind(&start_polymorphic);
4137 __ ldr(length, FieldMemOperand(feedback, FixedArray::kLengthOffset));
4138 if (!is_polymorphic) {
4139 // If the IC could be monomorphic we have to make sure we don't go past the
4140 // end of the feedback array.
4141 __ cmp(length, Operand(Smi::FromInt(2)));
4142 __ b(eq, miss);
4143 }
4144
4145 Register too_far = length;
4146 Register pointer_reg = feedback;
4147
4148 // +-----+------+------+-----+-----+ ... ----+
4149 // | map | len | wm0 | h0 | wm1 | hN |
4150 // +-----+------+------+-----+-----+ ... ----+
4151 // 0 1 2 len-1
4152 // ^ ^
4153 // | |
4154 // pointer_reg too_far
4155 // aka feedback scratch2
4156 // also need receiver_map
4157 // use cached_map (scratch1) to look in the weak map values.
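  // too_far points just past the last element of the feedback array, and
  // pointer_reg starts at the first cached map (element 2); each iteration
  // advances by two pointers, one for the weak-cell map and one for the
  // handler.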
4158 __ add(too_far, feedback, Operand::PointerOffsetFromSmiKey(length));
4159 __ add(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4160 __ add(pointer_reg, feedback,
4161 Operand(FixedArray::OffsetOfElementAt(2) - kHeapObjectTag));
4162
4163 __ bind(&next_loop);
4164 __ ldr(cached_map, MemOperand(pointer_reg));
4165 __ ldr(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
4166 __ cmp(receiver_map, cached_map);
4167 __ b(ne, &prepare_next);
4168 __ ldr(handler, MemOperand(pointer_reg, kPointerSize));
4169 __ add(pc, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
4170
4171 __ bind(&prepare_next);
4172 __ add(pointer_reg, pointer_reg, Operand(kPointerSize * 2));
4173 __ cmp(pointer_reg, too_far);
4174 __ b(lt, &next_loop);
4175
4176 // We exhausted our array of map handler pairs.
4177 __ jmp(miss);
4178}
4179
4180
4181static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
4182 Register receiver_map, Register feedback,
4183 Register vector, Register slot,
4184 Register scratch, Label* compare_map,
4185 Label* load_smi_map, Label* try_array) {
4186 __ JumpIfSmi(receiver, load_smi_map);
4187 __ ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
4188 __ bind(compare_map);
4189 Register cached_map = scratch;
4190 // Load the map guarded by the weak cell into the cached_map register.
4191 __ ldr(cached_map, FieldMemOperand(feedback, WeakCell::kValueOffset));
4192 __ cmp(cached_map, receiver_map);
4193 __ b(ne, try_array);
4194 Register handler = feedback;
4195 __ add(handler, vector, Operand::PointerOffsetFromSmiKey(slot));
4196 __ ldr(handler,
4197 FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize));
4198 __ add(pc, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
4199}
4200
4201
4202void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4203 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // r1
4204 Register name = LoadWithVectorDescriptor::NameRegister(); // r2
4205 Register vector = LoadWithVectorDescriptor::VectorRegister(); // r3
4206 Register slot = LoadWithVectorDescriptor::SlotRegister(); // r0
4207 Register feedback = r4;
4208 Register receiver_map = r5;
4209 Register scratch1 = r6;
4210
4211 __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
4212 __ ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
4213
4214 // Try to quickly handle the monomorphic case without knowing for sure
4215 // if we have a weak cell in feedback. We do know it's safe to look
4216 // at WeakCell::kValueOffset.
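  // The dispatch order is: monomorphic (a single weak cell in the feedback
  // slot), then polymorphic (a fixed array of weak-cell/handler pairs), then
  // megamorphic (the megamorphic symbol, handled via the stub cache probe),
  // and finally the miss handler.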
4217 Label try_array, load_smi_map, compare_map;
4218 Label not_array, miss;
4219 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
4220 scratch1, &compare_map, &load_smi_map, &try_array);
4221
4222 // Is it a fixed array?
4223 __ bind(&try_array);
4224 __ ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
4225 __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex);
4226 __ b(ne, &not_array);
4227 HandleArrayCases(masm, feedback, receiver_map, scratch1, r9, true, &miss);
4228
4229 __ bind(&not_array);
4230 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
4231 __ b(ne, &miss);
4232 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
4233 Code::ComputeHandlerFlags(Code::LOAD_IC));
4234 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags,
4235 receiver, name, feedback,
4236 receiver_map, scratch1, r9);
4237
4238 __ bind(&miss);
4239 LoadIC::GenerateMiss(masm);
4240
4241 __ bind(&load_smi_map);
4242 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
4243 __ jmp(&compare_map);
4244}
4245
4246
4247void KeyedLoadICStub::Generate(MacroAssembler* masm) {
4248 GenerateImpl(masm, false);
4249}
4250
4251
4252void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
4253 GenerateImpl(masm, true);
4254}
4255
4256
4257void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4258 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // r1
4259 Register key = LoadWithVectorDescriptor::NameRegister(); // r2
4260 Register vector = LoadWithVectorDescriptor::VectorRegister(); // r3
4261 Register slot = LoadWithVectorDescriptor::SlotRegister(); // r0
4262 Register feedback = r4;
4263 Register receiver_map = r5;
4264 Register scratch1 = r6;
4265
4266 __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
4267 __ ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
4268
4269 // Try to quickly handle the monomorphic case without knowing for sure
4270 // if we have a weak cell in feedback. We do know it's safe to look
4271 // at WeakCell::kValueOffset.
4272 Label try_array, load_smi_map, compare_map;
4273 Label not_array, miss;
4274 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
4275 scratch1, &compare_map, &load_smi_map, &try_array);
4276
4277 __ bind(&try_array);
4278 // Is it a fixed array?
4279 __ ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
4280 __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex);
4281 __ b(ne, &not_array);
4282
4283 // We have a polymorphic element handler.
4284 Label polymorphic, try_poly_name;
4285 __ bind(&polymorphic);
4286 HandleArrayCases(masm, feedback, receiver_map, scratch1, r9, true, &miss);
4287
4288 __ bind(&not_array);
4289 // Is it generic?
4290 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
4291 __ b(ne, &try_poly_name);
4292 Handle<Code> megamorphic_stub =
4293 KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
4294 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET);
4295
4296 __ bind(&try_poly_name);
4297 // We might have a name in feedback, and a fixed array in the next slot.
4298 __ cmp(key, feedback);
4299 __ b(ne, &miss);
4300 // If the name comparison succeeded, we know we have a fixed array with
4301 // at least one map/handler pair.
4302 __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
4303 __ ldr(feedback,
4304 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize));
4305 HandleArrayCases(masm, feedback, receiver_map, scratch1, r9, false, &miss);
4306
4307 __ bind(&miss);
4308 KeyedLoadIC::GenerateMiss(masm);
4309
4310 __ bind(&load_smi_map);
4311 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
4312 __ jmp(&compare_map);
4313}
4314
4315
4316void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
4317 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
4318 VectorStoreICStub stub(isolate(), state());
4319 stub.GenerateForTrampoline(masm);
4320}
4321
4322
4323void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
4324 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
4325 VectorKeyedStoreICStub stub(isolate(), state());
4326 stub.GenerateForTrampoline(masm);
4327}
4328
4329
4330void VectorStoreICStub::Generate(MacroAssembler* masm) {
4331 GenerateImpl(masm, false);
4332}
4333
4334
4335void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
4336 GenerateImpl(masm, true);
4337}
4338
4339
4340void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4341 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // r1
4342 Register key = VectorStoreICDescriptor::NameRegister(); // r2
4343 Register vector = VectorStoreICDescriptor::VectorRegister(); // r3
4344 Register slot = VectorStoreICDescriptor::SlotRegister(); // r4
4345 DCHECK(VectorStoreICDescriptor::ValueRegister().is(r0)); // r0
4346 Register feedback = r5;
4347 Register receiver_map = r6;
4348 Register scratch1 = r9;
4349
4350 __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
4351 __ ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
4352
4353 // Try to quickly handle the monomorphic case without knowing for sure
4354 // if we have a weak cell in feedback. We do know it's safe to look
4355 // at WeakCell::kValueOffset.
4356 Label try_array, load_smi_map, compare_map;
4357 Label not_array, miss;
4358 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
4359 scratch1, &compare_map, &load_smi_map, &try_array);
4360
4361 // Is it a fixed array?
4362 __ bind(&try_array);
4363 __ ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
4364 __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex);
4365 __ b(ne, &not_array);
4366
4367 // We are using register r8, which is used for the embedded constant pool
4368 // when FLAG_enable_embedded_constant_pool is true.
4369 DCHECK(!FLAG_enable_embedded_constant_pool);
4370 Register scratch2 = r8;
4371 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, true,
4372 &miss);
4373
4374 __ bind(&not_array);
4375 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
4376 __ b(ne, &miss);
4377 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
4378 Code::ComputeHandlerFlags(Code::STORE_IC));
4379 masm->isolate()->stub_cache()->GenerateProbe(
4380 masm, Code::STORE_IC, code_flags, receiver, key, feedback, receiver_map,
4381 scratch1, scratch2);
4382
4383 __ bind(&miss);
4384 StoreIC::GenerateMiss(masm);
4385
4386 __ bind(&load_smi_map);
4387 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
4388 __ jmp(&compare_map);
4389}
4390
4391
4392void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
4393 GenerateImpl(masm, false);
4394}
4395
4396
4397void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
4398 GenerateImpl(masm, true);
4399}
4400
4401
4402static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback,
4403 Register receiver_map, Register scratch1,
4404 Register scratch2, Label* miss) {
4405 // feedback initially contains the feedback array
4406 Label next_loop, prepare_next;
4407 Label start_polymorphic;
4408 Label transition_call;
4409
4410 Register cached_map = scratch1;
4411 Register too_far = scratch2;
4412 Register pointer_reg = feedback;
4413 __ ldr(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset));
4414
4415 // +-----+------+------+-----+-----+-----+ ... ----+
4416 // | map | len | wm0 | wt0 | h0 | wm1 | hN |
4417 // +-----+------+------+-----+-----+-----+ ... ----+
4418 // 0 1 2 len-1
4419 // ^ ^
4420 // | |
4421 // pointer_reg too_far
4422 // aka feedback scratch2
4423 // also need receiver_map
4424 // use cached_map (scratch1) to look in the weak map values.
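  // Store feedback entries are three words each: the receiver map weak
  // cell, a transition map weak cell (or undefined for a non-transitioning
  // store), and the handler; pointer_reg therefore advances by three
  // pointers per iteration.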
4425 __ add(too_far, feedback, Operand::PointerOffsetFromSmiKey(too_far));
4426 __ add(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4427 __ add(pointer_reg, feedback,
4428 Operand(FixedArray::OffsetOfElementAt(0) - kHeapObjectTag));
4429
4430 __ bind(&next_loop);
4431 __ ldr(cached_map, MemOperand(pointer_reg));
4432 __ ldr(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
4433 __ cmp(receiver_map, cached_map);
4434 __ b(ne, &prepare_next);
4435 // Is it a transitioning store?
4436 __ ldr(too_far, MemOperand(pointer_reg, kPointerSize));
4437 __ CompareRoot(too_far, Heap::kUndefinedValueRootIndex);
4438 __ b(ne, &transition_call);
4439 __ ldr(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2));
4440 __ add(pc, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag));
4441
4442 __ bind(&transition_call);
4443 __ ldr(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset));
4444 __ JumpIfSmi(too_far, miss);
4445
4446 __ ldr(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));
4447
4448 // Load the map into the correct register.
4449 DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister()));
4450 __ mov(feedback, too_far);
4451
4452 __ add(pc, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag));
4453
4454 __ bind(&prepare_next);
4455 __ add(pointer_reg, pointer_reg, Operand(kPointerSize * 3));
4456 __ cmp(pointer_reg, too_far);
4457 __ b(lt, &next_loop);
4458
4459 // We exhausted our array of map handler pairs.
4460 __ jmp(miss);
4461}
4462
4463
4464void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4465 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // r1
4466 Register key = VectorStoreICDescriptor::NameRegister(); // r2
4467 Register vector = VectorStoreICDescriptor::VectorRegister(); // r3
4468 Register slot = VectorStoreICDescriptor::SlotRegister(); // r4
4469 DCHECK(VectorStoreICDescriptor::ValueRegister().is(r0)); // r0
4470 Register feedback = r5;
4471 Register receiver_map = r6;
4472 Register scratch1 = r9;
4473
4474 __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
4475 __ ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
4476
4477 // Try to quickly handle the monomorphic case without knowing for sure
4478 // if we have a weak cell in feedback. We do know it's safe to look
4479 // at WeakCell::kValueOffset.
4480 Label try_array, load_smi_map, compare_map;
4481 Label not_array, miss;
4482 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
4483 scratch1, &compare_map, &load_smi_map, &try_array);
4484
4485 __ bind(&try_array);
4486 // Is it a fixed array?
4487 __ ldr(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
4488 __ CompareRoot(scratch1, Heap::kFixedArrayMapRootIndex);
4489 __ b(ne, &not_array);
4490
4491 // We have a polymorphic element handler.
4492 Label polymorphic, try_poly_name;
4493 __ bind(&polymorphic);
4494
4495 // We are using register r8, which is used for the embedded constant pool
4496 // when FLAG_enable_embedded_constant_pool is true.
4497 DCHECK(!FLAG_enable_embedded_constant_pool);
4498 Register scratch2 = r8;
4499
4500 HandlePolymorphicStoreCase(masm, feedback, receiver_map, scratch1, scratch2,
4501 &miss);
4502
4503 __ bind(&not_array);
4504 // Is it generic?
4505 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
4506 __ b(ne, &try_poly_name);
4507 Handle<Code> megamorphic_stub =
4508 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
4509 __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET);
4510
4511 __ bind(&try_poly_name);
4512 // We might have a name in feedback, and a fixed array in the next slot.
4513 __ cmp(key, feedback);
4514 __ b(ne, &miss);
4515 // If the name comparison succeeded, we know we have a fixed array with
4516 // at least one map/handler pair.
4517 __ add(feedback, vector, Operand::PointerOffsetFromSmiKey(slot));
4518 __ ldr(feedback,
4519 FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize));
4520 HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, false,
4521 &miss);
4522
4523 __ bind(&miss);
4524 KeyedStoreIC::GenerateMiss(masm);
4525
4526 __ bind(&load_smi_map);
4527 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
4528 __ jmp(&compare_map);
4529}
4530
4531
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004532void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
4533 if (masm->isolate()->function_entry_hook() != NULL) {
4534 ProfileEntryHookStub stub(masm->isolate());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004535 PredictableCodeSizeScope predictable(masm);
4536 predictable.ExpectSize(masm->CallStubSize(&stub) +
4537 2 * Assembler::kInstrSize);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004538 __ push(lr);
4539 __ CallStub(&stub);
4540 __ pop(lr);
4541 }
4542}
4543
4544
4545void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
4546 // The entry hook is a "push lr" instruction, followed by a call.
4547 const int32_t kReturnAddressDistanceFromFunctionStart =
4548 3 * Assembler::kInstrSize;
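  // This assumes the instrumented prologue is a one-instruction "push lr"
  // followed by a two-instruction call sequence (see MaybeCallEntryHook),
  // so lr points three instructions past the function start.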
4549
4550 // This should contain all kCallerSaved registers.
4551 const RegList kSavedRegs =
4552 1 << 0 | // r0
4553 1 << 1 | // r1
4554 1 << 2 | // r2
4555 1 << 3 | // r3
4556 1 << 5 | // r5
4557 1 << 9; // r9
4558 // We also save lr, so the count here is one higher than the mask indicates.
4559 const int32_t kNumSavedRegs = 7;
4560
4561 DCHECK((kCallerSaved & kSavedRegs) == kCallerSaved);
4562
4563 // Save all caller-save registers as this may be called from anywhere.
4564 __ stm(db_w, sp, kSavedRegs | lr.bit());
4565
4566 // Compute the function's address for the first argument.
4567 __ sub(r0, lr, Operand(kReturnAddressDistanceFromFunctionStart));
4568
4569 // The caller's return address is above the saved temporaries.
4570 // Grab that for the second argument to the hook.
4571 __ add(r1, sp, Operand(kNumSavedRegs * kPointerSize));
4572
4573 // Align the stack if necessary.
4574 int frame_alignment = masm->ActivationFrameAlignment();
4575 if (frame_alignment > kPointerSize) {
4576 __ mov(r5, sp);
4577 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
4578 __ and_(sp, sp, Operand(-frame_alignment));
4579 }
4580
4581#if V8_HOST_ARCH_ARM
4582 int32_t entry_hook =
4583 reinterpret_cast<int32_t>(isolate()->function_entry_hook());
4584 __ mov(ip, Operand(entry_hook));
4585#else
4586 // Under the simulator we need to indirect the entry hook through a
4587 // trampoline function at a known address.
4588 // It additionally takes an isolate as a third parameter
4589 __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
4590
4591 ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
4592 __ mov(ip, Operand(ExternalReference(&dispatcher,
4593 ExternalReference::BUILTIN_CALL,
4594 isolate())));
4595#endif
4596 __ Call(ip);
4597
4598 // Restore the stack pointer if needed.
4599 if (frame_alignment > kPointerSize) {
4600 __ mov(sp, r5);
4601 }
4602
4603 // Also pop pc to get Ret(0).
4604 __ ldm(ia_w, sp, kSavedRegs | pc.bit());
4605}
4606
4607
4608template<class T>
4609static void CreateArrayDispatch(MacroAssembler* masm,
4610 AllocationSiteOverrideMode mode) {
4611 if (mode == DISABLE_ALLOCATION_SITES) {
4612 T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
4613 __ TailCallStub(&stub);
4614 } else if (mode == DONT_OVERRIDE) {
4615 int last_index = GetSequenceIndexFromFastElementsKind(
4616 TERMINAL_FAST_ELEMENTS_KIND);
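    // r3 holds the untagged elements kind; probe each fast kind in canonical
    // order and tail-call the matching constructor stub.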
4617 for (int i = 0; i <= last_index; ++i) {
4618 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4619 __ cmp(r3, Operand(kind));
4620 T stub(masm->isolate(), kind);
4621 __ TailCallStub(&stub, eq);
4622 }
4623
4624 // If we reached this point there is a problem.
4625 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4626 } else {
4627 UNREACHABLE();
4628 }
4629}
4630
4631
4632static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
4633 AllocationSiteOverrideMode mode) {
4634 // r2 - allocation site (if mode != DISABLE_ALLOCATION_SITES)
4635 // r3 - kind (if mode != DISABLE_ALLOCATION_SITES)
4636 // r0 - number of arguments
4637 // r1 - constructor?
4638 // sp[0] - last argument
4639 Label normal_sequence;
4640 if (mode == DONT_OVERRIDE) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004641 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
4642 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
4643 STATIC_ASSERT(FAST_ELEMENTS == 2);
4644 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
4645 STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
4646 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004647
4648 // is the low bit set? If so, we are holey and that is good.
4649 __ tst(r3, Operand(1));
4650 __ b(ne, &normal_sequence);
4651 }
4652
4653 // look at the first argument
4654 __ ldr(r5, MemOperand(sp, 0));
4655 __ cmp(r5, Operand::Zero());
4656 __ b(eq, &normal_sequence);
4657
4658 if (mode == DISABLE_ALLOCATION_SITES) {
4659 ElementsKind initial = GetInitialFastElementsKind();
4660 ElementsKind holey_initial = GetHoleyElementsKind(initial);
4661
4662 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
4663 holey_initial,
4664 DISABLE_ALLOCATION_SITES);
4665 __ TailCallStub(&stub_holey);
4666
4667 __ bind(&normal_sequence);
4668 ArraySingleArgumentConstructorStub stub(masm->isolate(),
4669 initial,
4670 DISABLE_ALLOCATION_SITES);
4671 __ TailCallStub(&stub);
4672 } else if (mode == DONT_OVERRIDE) {
4673 // We are going to create a holey array, but our kind is non-holey.
4674 // Fix kind and retry (only if we have an allocation site in the slot).
4675 __ add(r3, r3, Operand(1));
4676
4677 if (FLAG_debug_code) {
4678 __ ldr(r5, FieldMemOperand(r2, 0));
4679 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
4680 __ Assert(eq, kExpectedAllocationSite);
4681 }
4682
4683 // Save the resulting elements kind in type info. We can't just store r3
4684 // in the AllocationSite::transition_info field because elements kind is
4685 // restricted to a portion of the field...upper bits need to be left alone.
4686 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4687 __ ldr(r4, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset));
4688 __ add(r4, r4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
4689 __ str(r4, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset));
4690
4691 __ bind(&normal_sequence);
4692 int last_index = GetSequenceIndexFromFastElementsKind(
4693 TERMINAL_FAST_ELEMENTS_KIND);
4694 for (int i = 0; i <= last_index; ++i) {
4695 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4696 __ cmp(r3, Operand(kind));
4697 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
4698 __ TailCallStub(&stub, eq);
4699 }
4700
4701 // If we reached this point there is a problem.
4702 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4703 } else {
4704 UNREACHABLE();
4705 }
4706}
4707
4708
4709template<class T>
4710static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4711 int to_index = GetSequenceIndexFromFastElementsKind(
4712 TERMINAL_FAST_ELEMENTS_KIND);
4713 for (int i = 0; i <= to_index; ++i) {
4714 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4715 T stub(isolate, kind);
4716 stub.GetCode();
4717 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
4718 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
4719 stub1.GetCode();
4720 }
4721 }
4722}
4723
4724
4725void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
4726 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
4727 isolate);
4728 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
4729 isolate);
4730 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
4731 isolate);
4732}
4733
4734
4735void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
4736 Isolate* isolate) {
4737 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
4738 for (int i = 0; i < 2; i++) {
4739 // For internal arrays we only need a few things
4740 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
4741 stubh1.GetCode();
4742 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
4743 stubh2.GetCode();
4744 InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
4745 stubh3.GetCode();
4746 }
4747}
4748
4749
4750void ArrayConstructorStub::GenerateDispatchToArrayStub(
4751 MacroAssembler* masm,
4752 AllocationSiteOverrideMode mode) {
4753 if (argument_count() == ANY) {
4754 Label not_zero_case, not_one_case;
4755 __ tst(r0, r0);
4756 __ b(ne, &not_zero_case);
4757 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4758
4759 __ bind(&not_zero_case);
4760 __ cmp(r0, Operand(1));
4761 __ b(gt, &not_one_case);
4762 CreateArrayDispatchOneArgument(masm, mode);
4763
4764 __ bind(&not_one_case);
4765 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4766 } else if (argument_count() == NONE) {
4767 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4768 } else if (argument_count() == ONE) {
4769 CreateArrayDispatchOneArgument(masm, mode);
4770 } else if (argument_count() == MORE_THAN_ONE) {
4771 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4772 } else {
4773 UNREACHABLE();
4774 }
4775}
4776
4777
4778void ArrayConstructorStub::Generate(MacroAssembler* masm) {
4779 // ----------- S t a t e -------------
4780 // -- r0 : argc (only if argument_count() == ANY)
4781 // -- r1 : constructor
4782 // -- r2 : AllocationSite or undefined
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004783 // -- r3 : new target
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004784 // -- sp[0] : return address
4785 // -- sp[4] : last argument
4786 // -----------------------------------
4787
4788 if (FLAG_debug_code) {
4789 // The array construct code is only set for the global and natives
4790 // builtin Array functions which always have maps.
4791
4792 // Initial map for the builtin Array function should be a map.
4793 __ ldr(r4, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
4794    // The Smi tag test below will catch both a NULL and a Smi.
4795 __ tst(r4, Operand(kSmiTagMask));
4796 __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
4797 __ CompareObjectType(r4, r4, r5, MAP_TYPE);
4798 __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
4799
4800 // We should either have undefined in r2 or a valid AllocationSite
4801 __ AssertUndefinedOrAllocationSite(r2, r4);
4802 }
4803
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004804 // Enter the context of the Array function.
4805 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
4806
4807 Label subclassing;
4808 __ cmp(r3, r1);
4809 __ b(ne, &subclassing);
4810
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004811 Label no_info;
4812 // Get the elements kind and case on that.
4813 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
4814 __ b(eq, &no_info);
4815
4816 __ ldr(r3, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset));
4817 __ SmiUntag(r3);
4818 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4819 __ and_(r3, r3, Operand(AllocationSite::ElementsKindBits::kMask));
4820 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
4821
4822 __ bind(&no_info);
4823 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004824
4825 __ bind(&subclassing);
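  // Array subclassing: store the constructor into the appropriate stack slot,
  // push the new target and allocation site, and set r0 to the runtime call's
  // argument count before tail-calling Runtime::kNewArray.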
4826 switch (argument_count()) {
4827 case ANY:
4828 case MORE_THAN_ONE:
4829 __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
4830 __ add(r0, r0, Operand(3));
4831 break;
4832 case NONE:
4833 __ str(r1, MemOperand(sp, 0 * kPointerSize));
4834 __ mov(r0, Operand(3));
4835 break;
4836 case ONE:
4837 __ str(r1, MemOperand(sp, 1 * kPointerSize));
4838 __ mov(r0, Operand(4));
4839 break;
4840 }
4841 __ Push(r3, r2);
4842 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004843}
4844
4845
4846void InternalArrayConstructorStub::GenerateCase(
4847 MacroAssembler* masm, ElementsKind kind) {
4848 __ cmp(r0, Operand(1));
4849
4850 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
4851 __ TailCallStub(&stub0, lo);
4852
4853 InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
4854 __ TailCallStub(&stubN, hi);
4855
4856 if (IsFastPackedElementsKind(kind)) {
4857 // We might need to create a holey array
4858 // look at the first argument
4859 __ ldr(r3, MemOperand(sp, 0));
4860 __ cmp(r3, Operand::Zero());
4861
4862 InternalArraySingleArgumentConstructorStub
4863 stub1_holey(isolate(), GetHoleyElementsKind(kind));
4864 __ TailCallStub(&stub1_holey, ne);
4865 }
4866
4867 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
4868 __ TailCallStub(&stub1);
4869}
4870
4871
4872void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
4873 // ----------- S t a t e -------------
4874 // -- r0 : argc
4875 // -- r1 : constructor
4876 // -- sp[0] : return address
4877 // -- sp[4] : last argument
4878 // -----------------------------------
4879
4880 if (FLAG_debug_code) {
4881 // The array construct code is only set for the global and natives
4882 // builtin Array functions which always have maps.
4883
4884 // Initial map for the builtin Array function should be a map.
4885 __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
4886    // The Smi tag test below will catch both a NULL and a Smi.
4887 __ tst(r3, Operand(kSmiTagMask));
4888 __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
4889 __ CompareObjectType(r3, r3, r4, MAP_TYPE);
4890 __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
4891 }
4892
4893 // Figure out the right elements kind
4894 __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
4895  // Load the map's "bit field 2" into r3. We only need the first byte,
4896 // but the following bit field extraction takes care of that anyway.
4897 __ ldr(r3, FieldMemOperand(r3, Map::kBitField2Offset));
4898 // Retrieve elements_kind from bit field 2.
4899 __ DecodeField<Map::ElementsKindBits>(r3);
4900
4901 if (FLAG_debug_code) {
4902 Label done;
4903 __ cmp(r3, Operand(FAST_ELEMENTS));
4904 __ b(eq, &done);
4905 __ cmp(r3, Operand(FAST_HOLEY_ELEMENTS));
4906 __ Assert(eq,
4907 kInvalidElementsKindForInternalArrayOrInternalPackedArray);
4908 __ bind(&done);
4909 }
4910
4911 Label fast_elements_case;
4912 __ cmp(r3, Operand(FAST_ELEMENTS));
4913 __ b(eq, &fast_elements_case);
4914 GenerateCase(masm, FAST_HOLEY_ELEMENTS);
4915
4916 __ bind(&fast_elements_case);
4917 GenerateCase(masm, FAST_ELEMENTS);
4918}
4919
4920
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00004921void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
4922 Register context = cp;
4923 Register result = r0;
4924 Register slot = r2;
4925
4926 // Go up the context chain to the script context.
4927 for (int i = 0; i < depth(); ++i) {
4928 __ ldr(result, ContextMemOperand(context, Context::PREVIOUS_INDEX));
4929 context = result;
4930 }
4931
4932 // Load the PropertyCell value at the specified slot.
4933 __ add(result, context, Operand(slot, LSL, kPointerSizeLog2));
4934 __ ldr(result, ContextMemOperand(result));
4935 __ ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset));
4936
4937 // If the result is not the_hole, return. Otherwise, handle in the runtime.
4938 __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
4939 __ Ret(ne);
4940
4941 // Fallback to runtime.
4942 __ SmiTag(slot);
4943 __ push(slot);
4944 __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
4945}
4946
4947
4948void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
4949 Register value = r0;
4950 Register slot = r2;
4951
4952 Register cell = r1;
4953 Register cell_details = r4;
4954 Register cell_value = r5;
4955 Register cell_value_map = r6;
4956 Register scratch = r9;
4957
4958 Register context = cp;
4959 Register context_temp = cell;
4960
4961 Label fast_heapobject_case, fast_smi_case, slow_case;
4962
4963 if (FLAG_debug_code) {
4964 __ CompareRoot(value, Heap::kTheHoleValueRootIndex);
4965 __ Check(ne, kUnexpectedValue);
4966 }
4967
4968 // Go up the context chain to the script context.
4969 for (int i = 0; i < depth(); i++) {
4970 __ ldr(context_temp, ContextMemOperand(context, Context::PREVIOUS_INDEX));
4971 context = context_temp;
4972 }
4973
4974 // Load the PropertyCell at the specified slot.
4975 __ add(cell, context, Operand(slot, LSL, kPointerSizeLog2));
4976 __ ldr(cell, ContextMemOperand(cell));
4977
4978 // Load PropertyDetails for the cell (actually only the cell_type and kind).
4979 __ ldr(cell_details, FieldMemOperand(cell, PropertyCell::kDetailsOffset));
4980 __ SmiUntag(cell_details);
4981 __ and_(cell_details, cell_details,
4982 Operand(PropertyDetails::PropertyCellTypeField::kMask |
4983 PropertyDetails::KindField::kMask |
4984 PropertyDetails::kAttributesReadOnlyMask));
4985
4986 // Check if PropertyCell holds mutable data.
4987 Label not_mutable_data;
4988 __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
4989 PropertyCellType::kMutable) |
4990 PropertyDetails::KindField::encode(kData)));
4991 __ b(ne, &not_mutable_data);
4992 __ JumpIfSmi(value, &fast_smi_case);
4993
4994 __ bind(&fast_heapobject_case);
4995 __ str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
4996 // RecordWriteField clobbers the value register, so we copy it before the
4997 // call.
4998 __ mov(r4, Operand(value));
4999 __ RecordWriteField(cell, PropertyCell::kValueOffset, r4, scratch,
5000 kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
5001 OMIT_SMI_CHECK);
5002 __ Ret();
5003
5004 __ bind(&not_mutable_data);
5005 // Check if PropertyCell value matches the new value (relevant for Constant,
5006 // ConstantType and Undefined cells).
5007 Label not_same_value;
5008 __ ldr(cell_value, FieldMemOperand(cell, PropertyCell::kValueOffset));
5009 __ cmp(cell_value, value);
5010 __ b(ne, &not_same_value);
5011
5012 // Make sure the PropertyCell is not marked READ_ONLY.
5013 __ tst(cell_details, Operand(PropertyDetails::kAttributesReadOnlyMask));
5014 __ b(ne, &slow_case);
5015
5016 if (FLAG_debug_code) {
5017 Label done;
5018 // This can only be true for Constant, ConstantType and Undefined cells,
5019 // because we never store the_hole via this stub.
5020 __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
5021 PropertyCellType::kConstant) |
5022 PropertyDetails::KindField::encode(kData)));
5023 __ b(eq, &done);
5024 __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
5025 PropertyCellType::kConstantType) |
5026 PropertyDetails::KindField::encode(kData)));
5027 __ b(eq, &done);
5028 __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
5029 PropertyCellType::kUndefined) |
5030 PropertyDetails::KindField::encode(kData)));
5031 __ Check(eq, kUnexpectedValue);
5032 __ bind(&done);
5033 }
5034 __ Ret();
5035 __ bind(&not_same_value);
5036
5037 // Check if PropertyCell contains data with constant type (and is not
5038 // READ_ONLY).
5039 __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
5040 PropertyCellType::kConstantType) |
5041 PropertyDetails::KindField::encode(kData)));
5042 __ b(ne, &slow_case);
5043
5044 // Now either both old and new values must be smis or both must be heap
5045 // objects with same map.
5046 Label value_is_heap_object;
5047 __ JumpIfNotSmi(value, &value_is_heap_object);
5048 __ JumpIfNotSmi(cell_value, &slow_case);
5049 // Old and new values are smis, no need for a write barrier here.
5050 __ bind(&fast_smi_case);
5051 __ str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
5052 __ Ret();
5053
5054 __ bind(&value_is_heap_object);
5055 __ JumpIfSmi(cell_value, &slow_case);
5056
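  // Both the old and the new value are heap objects: if their maps match, the
  // store can go through the fast heap-object path above (which includes the
  // write barrier).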
5057 __ ldr(cell_value_map, FieldMemOperand(cell_value, HeapObject::kMapOffset));
5058 __ ldr(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
5059 __ cmp(cell_value_map, scratch);
5060 __ b(eq, &fast_heapobject_case);
5061
5062 // Fallback to runtime.
5063 __ bind(&slow_case);
5064 __ SmiTag(slot);
5065 __ Push(slot, value);
5066 __ TailCallRuntime(is_strict(language_mode())
5067 ? Runtime::kStoreGlobalViaContext_Strict
5068 : Runtime::kStoreGlobalViaContext_Sloppy);
5069}
5070
5071
5072static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
5073 return ref0.address() - ref1.address();
5074}
5075
5076
5077// Calls an API function. Allocates a HandleScope, extracts the returned value
5078// from the handle and propagates exceptions. Restores context. stack_space
5079// - space to be unwound on exit (includes the JS call arguments space and
5080// the additional space allocated for the fast call).
5081static void CallApiFunctionAndReturn(MacroAssembler* masm,
5082 Register function_address,
5083 ExternalReference thunk_ref,
5084 int stack_space,
5085 MemOperand* stack_space_operand,
5086 MemOperand return_value_operand,
5087 MemOperand* context_restore_operand) {
5088 Isolate* isolate = masm->isolate();
5089 ExternalReference next_address =
5090 ExternalReference::handle_scope_next_address(isolate);
5091 const int kNextOffset = 0;
5092 const int kLimitOffset = AddressOffset(
5093 ExternalReference::handle_scope_limit_address(isolate), next_address);
5094 const int kLevelOffset = AddressOffset(
5095 ExternalReference::handle_scope_level_address(isolate), next_address);
5096
5097 DCHECK(function_address.is(r1) || function_address.is(r2));
5098
5099 Label profiler_disabled;
5100 Label end_profiler_check;
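  // After this check r3 holds the call target: the profiling thunk when the
  // profiler is active (the real callback is then passed as an additional
  // parameter), or the callback address itself otherwise.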
5101 __ mov(r9, Operand(ExternalReference::is_profiling_address(isolate)));
5102 __ ldrb(r9, MemOperand(r9, 0));
5103 __ cmp(r9, Operand(0));
5104 __ b(eq, &profiler_disabled);
5105
5106 // Additional parameter is the address of the actual callback.
5107 __ mov(r3, Operand(thunk_ref));
5108 __ jmp(&end_profiler_check);
5109
5110 __ bind(&profiler_disabled);
5111 __ Move(r3, function_address);
5112 __ bind(&end_profiler_check);
5113
5114 // Allocate HandleScope in callee-save registers.
5115 __ mov(r9, Operand(next_address));
5116 __ ldr(r4, MemOperand(r9, kNextOffset));
5117 __ ldr(r5, MemOperand(r9, kLimitOffset));
5118 __ ldr(r6, MemOperand(r9, kLevelOffset));
5119 __ add(r6, r6, Operand(1));
5120 __ str(r6, MemOperand(r9, kLevelOffset));
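  // Callee-saved r4, r5 and r6 now hold the previous scope's next, limit and
  // level; the level has been bumped for the new scope.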
5121
5122 if (FLAG_log_timer_events) {
5123 FrameScope frame(masm, StackFrame::MANUAL);
5124 __ PushSafepointRegisters();
5125 __ PrepareCallCFunction(1, r0);
5126 __ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
5127 __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
5128 1);
5129 __ PopSafepointRegisters();
5130 }
5131
5132 // Native call returns to the DirectCEntry stub which redirects to the
5133 // return address pushed on stack (could have moved after GC).
5134 // DirectCEntry stub itself is generated early and never moves.
5135 DirectCEntryStub stub(isolate);
5136 stub.GenerateCall(masm, r3);
5137
5138 if (FLAG_log_timer_events) {
5139 FrameScope frame(masm, StackFrame::MANUAL);
5140 __ PushSafepointRegisters();
5141 __ PrepareCallCFunction(1, r0);
5142 __ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
5143 __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
5144 1);
5145 __ PopSafepointRegisters();
5146 }
5147
5148 Label promote_scheduled_exception;
5149 Label delete_allocated_handles;
5150 Label leave_exit_frame;
5151 Label return_value_loaded;
5152
5153 // load value from ReturnValue
5154 __ ldr(r0, return_value_operand);
5155 __ bind(&return_value_loaded);
5156 // No more valid handles (the result handle was the last one). Restore
5157 // previous handle scope.
5158 __ str(r4, MemOperand(r9, kNextOffset));
5159 if (__ emit_debug_code()) {
5160 __ ldr(r1, MemOperand(r9, kLevelOffset));
5161 __ cmp(r1, r6);
5162 __ Check(eq, kUnexpectedLevelAfterReturnFromApiCall);
5163 }
5164 __ sub(r6, r6, Operand(1));
5165 __ str(r6, MemOperand(r9, kLevelOffset));
5166 __ ldr(ip, MemOperand(r9, kLimitOffset));
5167 __ cmp(r5, ip);
5168 __ b(ne, &delete_allocated_handles);
5169
5170 // Leave the API exit frame.
5171 __ bind(&leave_exit_frame);
5172 bool restore_context = context_restore_operand != NULL;
5173 if (restore_context) {
5174 __ ldr(cp, *context_restore_operand);
5175 }
5176 // LeaveExitFrame expects unwind space to be in a register.
5177 if (stack_space_operand != NULL) {
5178 __ ldr(r4, *stack_space_operand);
5179 } else {
5180 __ mov(r4, Operand(stack_space));
5181 }
5182 __ LeaveExitFrame(false, r4, !restore_context, stack_space_operand != NULL);
5183
5184 // Check if the function scheduled an exception.
5185 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
5186 __ mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate)));
5187 __ ldr(r5, MemOperand(ip));
5188 __ cmp(r4, r5);
5189 __ b(ne, &promote_scheduled_exception);
5190
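  // No exception was scheduled by the callback: return to the caller.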
5191 __ mov(pc, lr);
5192
5193 // Re-throw by promoting a scheduled exception.
5194 __ bind(&promote_scheduled_exception);
5195 __ TailCallRuntime(Runtime::kPromoteScheduledException);
5196
5197 // HandleScope limit has changed. Delete allocated extensions.
5198 __ bind(&delete_allocated_handles);
5199 __ str(r5, MemOperand(r9, kLimitOffset));
5200 __ mov(r4, r0);
5201 __ PrepareCallCFunction(1, r5);
5202 __ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
5203 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate),
5204 1);
5205 __ mov(r0, r4);
5206 __ jmp(&leave_exit_frame);
5207}
5208
5209
5210static void CallApiFunctionStubHelper(MacroAssembler* masm,
5211 const ParameterCount& argc,
5212 bool return_first_arg,
5213 bool call_data_undefined) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005214 // ----------- S t a t e -------------
5215 // -- r0 : callee
5216 // -- r4 : call_data
5217 // -- r2 : holder
5218 // -- r1 : api_function_address
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005219 // -- r3 : number of arguments if argc is a register
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005220 // -- cp : context
5221 // --
5222 // -- sp[0] : last argument
5223 // -- ...
5224 // -- sp[(argc - 1)* 4] : first argument
5225 // -- sp[argc * 4] : receiver
5226 // -----------------------------------
5227
5228 Register callee = r0;
5229 Register call_data = r4;
5230 Register holder = r2;
5231 Register api_function_address = r1;
5232 Register context = cp;
5233
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005234 typedef FunctionCallbackArguments FCA;
5235
5236 STATIC_ASSERT(FCA::kContextSaveIndex == 6);
5237 STATIC_ASSERT(FCA::kCalleeIndex == 5);
5238 STATIC_ASSERT(FCA::kDataIndex == 4);
5239 STATIC_ASSERT(FCA::kReturnValueOffset == 3);
5240 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
5241 STATIC_ASSERT(FCA::kIsolateIndex == 1);
5242 STATIC_ASSERT(FCA::kHolderIndex == 0);
5243 STATIC_ASSERT(FCA::kArgsLength == 7);
5244
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005245 DCHECK(argc.is_immediate() || r3.is(argc.reg()));
5246
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005247 // context save
5248 __ push(context);
5249 // load context from callee
5250 __ ldr(context, FieldMemOperand(callee, JSFunction::kContextOffset));
5251
5252 // callee
5253 __ push(callee);
5254
5255 // call data
5256 __ push(call_data);
5257
5258 Register scratch = call_data;
5259 if (!call_data_undefined) {
5260 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
5261 }
5262 // return value
5263 __ push(scratch);
5264 // return value default
5265 __ push(scratch);
5266 // isolate
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005267 __ mov(scratch, Operand(ExternalReference::isolate_address(masm->isolate())));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005268 __ push(scratch);
5269 // holder
5270 __ push(holder);
5271
5272 // Prepare arguments.
5273 __ mov(scratch, sp);
5274
5275  // Allocate the FunctionCallbackInfo structure in the arguments' space,
5276  // since it's not controlled by GC.
5277 const int kApiStackSpace = 4;
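  // Four words cover FunctionCallbackInfo's implicit_args_, values_, length_
  // and is_construct_call_ fields, which are filled in below.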
5278
5279 FrameScope frame_scope(masm, StackFrame::MANUAL);
5280 __ EnterExitFrame(false, kApiStackSpace);
5281
5282 DCHECK(!api_function_address.is(r0) && !scratch.is(r0));
5283 // r0 = FunctionCallbackInfo&
5284 // Arguments is after the return address.
5285 __ add(r0, sp, Operand(1 * kPointerSize));
5286 // FunctionCallbackInfo::implicit_args_
5287 __ str(scratch, MemOperand(r0, 0 * kPointerSize));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005288 if (argc.is_immediate()) {
5289 // FunctionCallbackInfo::values_
5290 __ add(ip, scratch,
5291 Operand((FCA::kArgsLength - 1 + argc.immediate()) * kPointerSize));
5292 __ str(ip, MemOperand(r0, 1 * kPointerSize));
5293 // FunctionCallbackInfo::length_ = argc
5294 __ mov(ip, Operand(argc.immediate()));
5295 __ str(ip, MemOperand(r0, 2 * kPointerSize));
5296 // FunctionCallbackInfo::is_construct_call_ = 0
5297 __ mov(ip, Operand::Zero());
5298 __ str(ip, MemOperand(r0, 3 * kPointerSize));
5299 } else {
5300 // FunctionCallbackInfo::values_
5301 __ add(ip, scratch, Operand(argc.reg(), LSL, kPointerSizeLog2));
5302 __ add(ip, ip, Operand((FCA::kArgsLength - 1) * kPointerSize));
5303 __ str(ip, MemOperand(r0, 1 * kPointerSize));
5304 // FunctionCallbackInfo::length_ = argc
5305 __ str(argc.reg(), MemOperand(r0, 2 * kPointerSize));
5306 // FunctionCallbackInfo::is_construct_call_
5307 __ add(argc.reg(), argc.reg(), Operand(FCA::kArgsLength + 1));
5308 __ mov(ip, Operand(argc.reg(), LSL, kPointerSizeLog2));
5309 __ str(ip, MemOperand(r0, 3 * kPointerSize));
5310 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005311
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005312 ExternalReference thunk_ref =
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005313 ExternalReference::invoke_function_callback(masm->isolate());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005314
5315 AllowExternalCallThatCantCauseGC scope(masm);
5316 MemOperand context_restore_operand(
5317 fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
5318  // Stores return the first JS argument.
5319 int return_value_offset = 0;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005320 if (return_first_arg) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005321 return_value_offset = 2 + FCA::kArgsLength;
5322 } else {
5323 return_value_offset = 2 + FCA::kReturnValueOffset;
5324 }
5325 MemOperand return_value_operand(fp, return_value_offset * kPointerSize);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005326 int stack_space = 0;
5327 MemOperand is_construct_call_operand = MemOperand(sp, 4 * kPointerSize);
5328 MemOperand* stack_space_operand = &is_construct_call_operand;
5329 if (argc.is_immediate()) {
5330 stack_space = argc.immediate() + FCA::kArgsLength + 1;
5331 stack_space_operand = NULL;
5332 }
5333 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, stack_space,
5334 stack_space_operand, return_value_operand,
5335 &context_restore_operand);
5336}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005337
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005338
5339void CallApiFunctionStub::Generate(MacroAssembler* masm) {
5340 bool call_data_undefined = this->call_data_undefined();
5341 CallApiFunctionStubHelper(masm, ParameterCount(r3), false,
5342 call_data_undefined);
5343}
5344
5345
5346void CallApiAccessorStub::Generate(MacroAssembler* masm) {
5347 bool is_store = this->is_store();
5348 int argc = this->argc();
5349 bool call_data_undefined = this->call_data_undefined();
5350 CallApiFunctionStubHelper(masm, ParameterCount(argc), is_store,
5351 call_data_undefined);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005352}
5353
5354
5355void CallApiGetterStub::Generate(MacroAssembler* masm) {
5356 // ----------- S t a t e -------------
5357 // -- sp[0] : name
5358 // -- sp[4 - kArgsLength*4] : PropertyCallbackArguments object
5359 // -- ...
5360 // -- r2 : api_function_address
5361 // -----------------------------------
5362
5363 Register api_function_address = ApiGetterDescriptor::function_address();
5364 DCHECK(api_function_address.is(r2));
5365
5366 __ mov(r0, sp); // r0 = Handle<Name>
5367 __ add(r1, r0, Operand(1 * kPointerSize)); // r1 = PCA
5368
5369 const int kApiStackSpace = 1;
5370 FrameScope frame_scope(masm, StackFrame::MANUAL);
5371 __ EnterExitFrame(false, kApiStackSpace);
5372
5373 // Create PropertyAccessorInfo instance on the stack above the exit frame with
5374 // r1 (internal::Object** args_) as the data.
5375 __ str(r1, MemOperand(sp, 1 * kPointerSize));
5376 __ add(r1, sp, Operand(1 * kPointerSize)); // r1 = AccessorInfo&
5377
5378 const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
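  // Unwind the PropertyCallbackArguments block plus the pushed name on return.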
5379
5380 ExternalReference thunk_ref =
5381 ExternalReference::invoke_accessor_getter_callback(isolate());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005382 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
5383 kStackUnwindSpace, NULL,
5384 MemOperand(fp, 6 * kPointerSize), NULL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005385}
5386
5387
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005388#undef __
5389
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005390} // namespace internal
5391} // namespace v8
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005392
5393#endif // V8_TARGET_ARCH_ARM