// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/arm/codegen-arm.h"

#if V8_TARGET_ARCH_ARM

#include "src/arm/simulator-arm.h"
#include "src/codegen.h"
#include "src/macro-assembler.h"

namespace v8 {
namespace internal {


#define __ masm.

#if defined(V8_HOST_ARCH_ARM)
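// Generates a specialized memcpy for byte arrays: dest in r0, src in r1,
// byte count in r2. The NEON path copies 64 bytes per main-loop iteration
// with pld prefetching and finishes with an 8-byte copy that may overlap
// the previous one; the fallback path copies a word at a time. Returns
// |stub| unchanged when unaligned accesses are unsupported or when running
// on the simulator.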
MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
                                                MemCopyUint8Function stub) {
#if defined(USE_SIMULATOR)
  return stub;
#else
  if (!CpuFeatures::IsSupported(UNALIGNED_ACCESSES)) return stub;
  size_t actual_size;
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return stub;

  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);

  Register dest = r0;
  Register src = r1;
  Register chars = r2;
  Register temp1 = r3;
  Label less_4;

  if (CpuFeatures::IsSupported(NEON)) {
    Label loop, less_256, less_128, less_64, less_32, _16_or_less, _8_or_less;
    Label size_less_than_8;
    __ pld(MemOperand(src, 0));

    __ cmp(chars, Operand(8));
    __ b(lt, &size_less_than_8);
    __ cmp(chars, Operand(32));
    __ b(lt, &less_32);
    if (CpuFeatures::dcache_line_size() == 32) {
      __ pld(MemOperand(src, 32));
    }
    __ cmp(chars, Operand(64));
    __ b(lt, &less_64);
    __ pld(MemOperand(src, 64));
    if (CpuFeatures::dcache_line_size() == 32) {
      __ pld(MemOperand(src, 96));
    }
    __ cmp(chars, Operand(128));
    __ b(lt, &less_128);
    __ pld(MemOperand(src, 128));
    if (CpuFeatures::dcache_line_size() == 32) {
      __ pld(MemOperand(src, 160));
    }
    __ pld(MemOperand(src, 192));
    if (CpuFeatures::dcache_line_size() == 32) {
      __ pld(MemOperand(src, 224));
    }
    __ cmp(chars, Operand(256));
    __ b(lt, &less_256);
    __ sub(chars, chars, Operand(256));

    __ bind(&loop);
    __ pld(MemOperand(src, 256));
    __ vld1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(src, PostIndex));
    if (CpuFeatures::dcache_line_size() == 32) {
      __ pld(MemOperand(src, 256));
    }
    __ vld1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(src, PostIndex));
    __ sub(chars, chars, Operand(64), SetCC);
    __ vst1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(dest, PostIndex));
    __ vst1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(dest, PostIndex));
    __ b(ge, &loop);
    __ add(chars, chars, Operand(256));

    __ bind(&less_256);
    __ vld1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(src, PostIndex));
    __ vld1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(src, PostIndex));
    __ sub(chars, chars, Operand(128));
    __ vst1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(dest, PostIndex));
    __ vst1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(dest, PostIndex));
    __ vld1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(src, PostIndex));
    __ vld1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(src, PostIndex));
    __ vst1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(dest, PostIndex));
    __ vst1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(dest, PostIndex));
    __ cmp(chars, Operand(64));
    __ b(lt, &less_64);

    __ bind(&less_128);
    __ vld1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(src, PostIndex));
    __ vld1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(src, PostIndex));
    __ sub(chars, chars, Operand(64));
    __ vst1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(dest, PostIndex));
    __ vst1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(dest, PostIndex));

    __ bind(&less_64);
    __ cmp(chars, Operand(32));
    __ b(lt, &less_32);
    __ vld1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(src, PostIndex));
    __ vst1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(dest, PostIndex));
    __ sub(chars, chars, Operand(32));

    __ bind(&less_32);
    __ cmp(chars, Operand(16));
    __ b(le, &_16_or_less);
    __ vld1(Neon8, NeonListOperand(d0, 2), NeonMemOperand(src, PostIndex));
    __ vst1(Neon8, NeonListOperand(d0, 2), NeonMemOperand(dest, PostIndex));
    __ sub(chars, chars, Operand(16));

    __ bind(&_16_or_less);
    __ cmp(chars, Operand(8));
    __ b(le, &_8_or_less);
    __ vld1(Neon8, NeonListOperand(d0), NeonMemOperand(src, PostIndex));
    __ vst1(Neon8, NeonListOperand(d0), NeonMemOperand(dest, PostIndex));
    __ sub(chars, chars, Operand(8));

    // Do a last copy which may overlap with the previous copy (up to 8 bytes).
    __ bind(&_8_or_less);
    __ rsb(chars, chars, Operand(8));
    __ sub(src, src, Operand(chars));
    __ sub(dest, dest, Operand(chars));
    __ vld1(Neon8, NeonListOperand(d0), NeonMemOperand(src));
    __ vst1(Neon8, NeonListOperand(d0), NeonMemOperand(dest));

    __ Ret();

    __ bind(&size_less_than_8);

    __ bic(temp1, chars, Operand(0x3), SetCC);
    __ b(&less_4, eq);
    __ ldr(temp1, MemOperand(src, 4, PostIndex));
    __ str(temp1, MemOperand(dest, 4, PostIndex));
  } else {
    Register temp2 = ip;
    Label loop;

    __ bic(temp2, chars, Operand(0x3), SetCC);
    __ b(&less_4, eq);
    __ add(temp2, dest, temp2);

    __ bind(&loop);
    __ ldr(temp1, MemOperand(src, 4, PostIndex));
    __ str(temp1, MemOperand(dest, 4, PostIndex));
    __ cmp(dest, temp2);
    __ b(&loop, ne);
  }

  __ bind(&less_4);
  __ mov(chars, Operand(chars, LSL, 31), SetCC);
  // bit0 => Z (ne), bit1 => C (cs)
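  // The shift above moved bit 1 of the remaining count into the carry flag
  // and left a non-zero result exactly when bit 0 was set, so the
  // cs-conditional halfword copy and the ne-conditional byte copy below
  // together move the final (chars mod 4) bytes.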
  __ ldrh(temp1, MemOperand(src, 2, PostIndex), cs);
  __ strh(temp1, MemOperand(dest, 2, PostIndex), cs);
  __ ldrb(temp1, MemOperand(src), ne);
  __ strb(temp1, MemOperand(dest), ne);
  __ Ret();

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<MemCopyUint8Function>(buffer);
#endif
}


// Convert 8 to 16. The number of characters to copy must be at least 8.
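// The NEON path widens eight bytes per iteration with vmovl; the fallback
// path widens four bytes per iteration with uxtb16/pkh packing (see the
// comment ahead of its loop below).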
MemCopyUint16Uint8Function CreateMemCopyUint16Uint8Function(
    Isolate* isolate, MemCopyUint16Uint8Function stub) {
#if defined(USE_SIMULATOR)
  return stub;
#else
  if (!CpuFeatures::IsSupported(UNALIGNED_ACCESSES)) return stub;
  size_t actual_size;
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return stub;

  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);

  Register dest = r0;
  Register src = r1;
  Register chars = r2;
  if (CpuFeatures::IsSupported(NEON)) {
    Register temp = r3;
    Label loop;

    __ bic(temp, chars, Operand(0x7));
    __ sub(chars, chars, Operand(temp));
    __ add(temp, dest, Operand(temp, LSL, 1));

    __ bind(&loop);
    __ vld1(Neon8, NeonListOperand(d0), NeonMemOperand(src, PostIndex));
    __ vmovl(NeonU8, q0, d0);
    __ vst1(Neon16, NeonListOperand(d0, 2), NeonMemOperand(dest, PostIndex));
    __ cmp(dest, temp);
    __ b(&loop, ne);

    // Do a last copy which will overlap with the previous copy (1 to 8 bytes).
    __ rsb(chars, chars, Operand(8));
    __ sub(src, src, Operand(chars));
    __ sub(dest, dest, Operand(chars, LSL, 1));
    __ vld1(Neon8, NeonListOperand(d0), NeonMemOperand(src));
    __ vmovl(NeonU8, q0, d0);
    __ vst1(Neon16, NeonListOperand(d0, 2), NeonMemOperand(dest));
    __ Ret();
  } else {
    Register temp1 = r3;
    Register temp2 = ip;
    Register temp3 = lr;
    Register temp4 = r4;
    Label loop;
    Label not_two;

    __ Push(lr, r4);
    __ bic(temp2, chars, Operand(0x3));
    __ add(temp2, dest, Operand(temp2, LSL, 1));

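    // Widen four bytes per iteration: uxtb16 zero-extends bytes 0 and 2 into
    // temp3, and its rotate-by-8 form extracts bytes 1 and 3 into temp4;
    // pkhbt/pkhtb then repack them into two words holding the halfword pairs
    // (b0, b1) and (b2, b3).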
    __ bind(&loop);
    __ ldr(temp1, MemOperand(src, 4, PostIndex));
    __ uxtb16(temp3, temp1);
    __ uxtb16(temp4, temp1, 8);
    __ pkhbt(temp1, temp3, Operand(temp4, LSL, 16));
    __ str(temp1, MemOperand(dest));
    __ pkhtb(temp1, temp4, Operand(temp3, ASR, 16));
    __ str(temp1, MemOperand(dest, 4));
    __ add(dest, dest, Operand(8));
    __ cmp(dest, temp2);
    __ b(&loop, ne);

    __ mov(chars, Operand(chars, LSL, 31), SetCC);  // bit0 => ne, bit1 => cs
    __ b(&not_two, cc);
    __ ldrh(temp1, MemOperand(src, 2, PostIndex));
    __ uxtb(temp3, temp1, 8);
    __ mov(temp3, Operand(temp3, LSL, 16));
    __ uxtab(temp3, temp3, temp1);
    __ str(temp3, MemOperand(dest, 4, PostIndex));
    __ bind(&not_two);
    __ ldrb(temp1, MemOperand(src), ne);
    __ strh(temp1, MemOperand(dest), ne);
    __ Pop(pc, r4);
  }

  CodeDesc desc;
  masm.GetCode(&desc);

  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);

  return FUNCTION_CAST<MemCopyUint16Uint8Function>(buffer);
#endif
}
#endif

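// Generates a square-root stub: the double argument is moved from the float
// parameter location into d0, computed with a single vsqrt, and returned in
// the float result location. Under the simulator this returns nullptr,
// leaving the caller to fall back to a portable implementation.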
UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
#if defined(USE_SIMULATOR)
  return nullptr;
#else
  size_t actual_size;
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;

  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);

  __ MovFromFloatParameter(d0);
  __ vsqrt(d0, d0);
  __ MovToFloatResult(d0);
  __ Ret();

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
#endif
}

#undef __


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterFrame(StackFrame::INTERNAL);
  DCHECK(!masm->has_frame());
  masm->set_has_frame(true);
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveFrame(StackFrame::INTERNAL);
  DCHECK(masm->has_frame());
  masm->set_has_frame(false);
}


// -------------------------------------------------------------------------
// Code generators

#define __ ACCESS_MASM(masm)

void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* allocation_memento_found) {
  Register scratch_elements = r4;
  DCHECK(!AreAliased(receiver, key, value, target_map,
                     scratch_elements));

  if (mode == TRACK_ALLOCATION_SITE) {
    DCHECK(allocation_memento_found != NULL);
    __ JumpIfJSArrayHasAllocationMemento(
        receiver, scratch_elements, allocation_memento_found);
  }

  // Set transitioned map.
  __ str(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ RecordWriteField(receiver,
                      HeapObject::kMapOffset,
                      target_map,
                      r9,
                      kLRHasNotBeenSaved,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}


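// Transitions the receiver's backing store from a FixedArray of smis to a
// newly allocated FixedDoubleArray, converting each smi to a double with
// vcvt and writing the hole NaN for holes. Jumps to |fail| if allocation
// fails or, in TRACK_ALLOCATION_SITE mode, if an allocation memento is
// present.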
void ElementsTransitionGenerator::GenerateSmiToDouble(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Register lr contains the return address.
  Label loop, entry, convert_hole, gc_required, only_change_map, done;
  Register elements = r4;
  Register length = r5;
  Register array = r6;
  Register array_end = array;

  // target_map parameter can be clobbered.
  Register scratch1 = target_map;
  Register scratch2 = r9;

  // Verify input registers don't conflict with locals.
  DCHECK(!AreAliased(receiver, key, value, target_map,
                     elements, length, array, scratch2));

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(receiver, elements, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex);
  __ b(eq, &only_change_map);

  __ push(lr);
  __ ldr(length, FieldMemOperand(elements, FixedArray::kLengthOffset));
  // length: number of elements (smi-tagged)

  // Allocate new FixedDoubleArray.
  // Use lr as a temporary register.
  __ mov(lr, Operand(length, LSL, 2));
  __ add(lr, lr, Operand(FixedDoubleArray::kHeaderSize));
  __ Allocate(lr, array, elements, scratch2, &gc_required, DOUBLE_ALIGNMENT);
  __ sub(array, array, Operand(kHeapObjectTag));
  // array: destination FixedDoubleArray, not tagged as heap object.
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  // r4: source FixedArray.

  // Set destination FixedDoubleArray's length and map.
  __ LoadRoot(scratch2, Heap::kFixedDoubleArrayMapRootIndex);
  __ str(length, MemOperand(array, FixedDoubleArray::kLengthOffset));
  __ str(scratch2, MemOperand(array, HeapObject::kMapOffset));

  // Update receiver's map.
  __ str(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ RecordWriteField(receiver,
                      HeapObject::kMapOffset,
                      target_map,
                      scratch2,
                      kLRHasBeenSaved,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // Replace receiver's backing store with newly created FixedDoubleArray.
  __ add(scratch1, array, Operand(kHeapObjectTag));
  __ str(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ RecordWriteField(receiver,
                      JSObject::kElementsOffset,
                      scratch1,
                      scratch2,
                      kLRHasBeenSaved,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Prepare for conversion loop.
  __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(scratch2, array, Operand(FixedDoubleArray::kHeaderSize));
  __ add(array_end, scratch2, Operand(length, LSL, 2));

  // Repurpose registers no longer in use.
  Register hole_lower = elements;
  Register hole_upper = length;

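  // The hole NaN is a NaN bit pattern reserved by V8 to mark unpopulated
  // ("hole") elements in a FixedDoubleArray, chosen so that ordinary
  // floating-point arithmetic never produces it; it is written 32 bits at a
  // time from these two registers.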
  __ mov(hole_lower, Operand(kHoleNanLower32));
  __ mov(hole_upper, Operand(kHoleNanUpper32));
  // scratch1: begin of source FixedArray element fields, not tagged
  // hole_lower: kHoleNanLower32
  // hole_upper: kHoleNanUpper32
  // array_end: end of destination FixedDoubleArray, not tagged
  // scratch2: begin of FixedDoubleArray element fields, not tagged

  __ b(&entry);

  __ bind(&only_change_map);
  __ str(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ RecordWriteField(receiver,
                      HeapObject::kMapOffset,
                      target_map,
                      scratch2,
                      kLRHasNotBeenSaved,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ b(&done);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  __ pop(lr);
  __ b(fail);

  // Convert and copy elements.
  __ bind(&loop);
  __ ldr(lr, MemOperand(scratch1, 4, PostIndex));
  // lr: current element
  __ UntagAndJumpIfNotSmi(lr, lr, &convert_hole);

  // Normal smi, convert to double and store.
  __ vmov(s0, lr);
  __ vcvt_f64_s32(d0, s0);
  __ vstr(d0, scratch2, 0);
  __ add(scratch2, scratch2, Operand(8));
  __ b(&entry);

  // Hole found, store the-hole NaN.
  __ bind(&convert_hole);
  if (FLAG_debug_code) {
    // Restore a "smi-untagged" heap object.
    __ SmiTag(lr);
    __ orr(lr, lr, Operand(1));
    __ CompareRoot(lr, Heap::kTheHoleValueRootIndex);
    __ Assert(eq, kObjectFoundInSmiOnlyArray);
  }
  __ Strd(hole_lower, hole_upper, MemOperand(scratch2, 8, PostIndex));

  __ bind(&entry);
  __ cmp(scratch2, array_end);
  __ b(lt, &loop);

  __ pop(lr);
  __ bind(&done);
}


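// The reverse transition: copies a FixedDoubleArray into a newly allocated
// FixedArray, boxing each double value in a fresh HeapNumber and replacing
// hole NaNs with the-hole sentinel. Jumps to |fail| when a GC is required.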
void ElementsTransitionGenerator::GenerateDoubleToObject(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Register lr contains the return address.
  Label entry, loop, convert_hole, gc_required, only_change_map;
  Register elements = r4;
  Register array = r6;
  Register length = r5;
  Register scratch = r9;

  // Verify input registers don't conflict with locals.
  DCHECK(!AreAliased(receiver, key, value, target_map,
                     elements, array, length, scratch));

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(receiver, elements, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex);
  __ b(eq, &only_change_map);

  __ push(lr);
  __ Push(target_map, receiver, key, value);
  __ ldr(length, FieldMemOperand(elements, FixedArray::kLengthOffset));
  // elements: source FixedDoubleArray
  // length: number of elements (smi-tagged)

  // Allocate new FixedArray.
  // Re-use value and target_map registers, as they have been saved on the
  // stack.
  Register array_size = value;
  Register allocate_scratch = target_map;
  __ mov(array_size, Operand(FixedDoubleArray::kHeaderSize));
  __ add(array_size, array_size, Operand(length, LSL, 1));
  __ Allocate(array_size, array, allocate_scratch, scratch, &gc_required,
              NO_ALLOCATION_FLAGS);
  // array: destination FixedArray, tagged as heap object
  // Set destination FixedArray's length and map.
  __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
  __ str(length, FieldMemOperand(array, FixedDoubleArray::kLengthOffset));
  __ str(scratch, FieldMemOperand(array, HeapObject::kMapOffset));

  __ sub(array, array, Operand(kHeapObjectTag));

  // Prepare for conversion loop.
  Register src_elements = elements;
  Register dst_elements = target_map;
  Register dst_end = length;
  Register heap_number_map = scratch;
  __ add(src_elements, elements,
         Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4));
  __ add(dst_elements, array, Operand(FixedArray::kHeaderSize));
  __ add(dst_end, dst_elements, Operand(length, LSL, 1));

  // Allocating heap numbers in the loop below can fail and cause a jump to
  // gc_required. We can't leave a partly initialized FixedArray behind,
  // so pessimistically fill it with holes now.
  Label initialization_loop, initialization_loop_entry;
  __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
  __ b(&initialization_loop_entry);
  __ bind(&initialization_loop);
  __ str(scratch, MemOperand(dst_elements, kPointerSize, PostIndex));
  __ bind(&initialization_loop_entry);
  __ cmp(dst_elements, dst_end);
  __ b(lt, &initialization_loop);

  __ add(dst_elements, array, Operand(FixedArray::kHeaderSize));
  __ add(array, array, Operand(kHeapObjectTag));
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
570 // post-indexing.
571 // dst_elements: begin of destination FixedArray element fields, not tagged
572 // src_elements: begin of source FixedDoubleArray element fields,
573 // not tagged, +4
574 // dst_end: end of destination FixedArray, not tagged
575 // array: destination FixedArray
576 // heap_number_map: heap number map
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100577 __ b(&entry);
578
579 // Call into runtime if GC is required.
580 __ bind(&gc_required);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000581 __ Pop(target_map, receiver, key, value);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100582 __ pop(lr);
583 __ b(fail);
584
585 __ bind(&loop);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000586 Register upper_bits = key;
587 __ ldr(upper_bits, MemOperand(src_elements, 8, PostIndex));
588 // upper_bits: current element's upper 32 bit
589 // src_elements: address of next element's upper 32 bit
590 __ cmp(upper_bits, Operand(kHoleNanUpper32));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100591 __ b(eq, &convert_hole);
592
593 // Non-hole double, copy value into a heap number.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000594 Register heap_number = receiver;
595 Register scratch2 = value;
596 __ AllocateHeapNumber(heap_number, scratch2, lr, heap_number_map,
597 &gc_required);
598 // heap_number: new heap number
599 __ ldr(scratch2, MemOperand(src_elements, 12, NegOffset));
600 __ Strd(scratch2, upper_bits,
601 FieldMemOperand(heap_number, HeapNumber::kValueOffset));
602 __ mov(scratch2, dst_elements);
603 __ str(heap_number, MemOperand(dst_elements, 4, PostIndex));
604 __ RecordWrite(array,
605 scratch2,
606 heap_number,
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100607 kLRHasBeenSaved,
608 kDontSaveFPRegs,
609 EMIT_REMEMBERED_SET,
610 OMIT_SMI_CHECK);
611 __ b(&entry);
612
613 // Replace the-hole NaN with the-hole pointer.
614 __ bind(&convert_hole);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000615 __ LoadRoot(scratch2, Heap::kTheHoleValueRootIndex);
616 __ str(scratch2, MemOperand(dst_elements, 4, PostIndex));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100617
618 __ bind(&entry);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000619 __ cmp(dst_elements, dst_end);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100620 __ b(lt, &loop);
621
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000622 __ Pop(target_map, receiver, key, value);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100623 // Replace receiver's backing store with newly created and filled FixedArray.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000624 __ str(array, FieldMemOperand(receiver, JSObject::kElementsOffset));
625 __ RecordWriteField(receiver,
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100626 JSObject::kElementsOffset,
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000627 array,
628 scratch,
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100629 kLRHasBeenSaved,
630 kDontSaveFPRegs,
631 EMIT_REMEMBERED_SET,
632 OMIT_SMI_CHECK);
633 __ pop(lr);
634
635 __ bind(&only_change_map);
636 // Update receiver's map.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000637 __ str(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
638 __ RecordWriteField(receiver,
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100639 HeapObject::kMapOffset,
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000640 target_map,
641 scratch,
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100642 kLRHasNotBeenSaved,
643 kDontSaveFPRegs,
644 OMIT_REMEMBERED_SET,
645 OMIT_SMI_CHECK);
646}
647
648
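// Loads the character at |index| of |string| into |result|. Indirect strings
// (slices, and cons strings whose second part is empty) are first unwrapped
// to the underlying sequential or external string; cons strings that would
// need flattening, and short external strings, bail out to |call_runtime|.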
void StringCharLoadGenerator::Generate(MacroAssembler* masm,
                                       Register string,
                                       Register index,
                                       Register result,
                                       Label* call_runtime) {
  // Fetch the instance type of the receiver into result register.
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ tst(result, Operand(kIsIndirectStringMask));
  __ b(eq, &check_sequential);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  __ tst(result, Operand(kSlicedNotConsMask));
  __ b(eq, &cons_string);

  // Handle slices.
  Label indirect_string_loaded;
  __ ldr(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
  __ ldr(string, FieldMemOperand(string, SlicedString::kParentOffset));
  __ add(index, index, Operand::SmiUntag(result));
  __ jmp(&indirect_string_loaded);

  // Handle cons strings.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ ldr(result, FieldMemOperand(string, ConsString::kSecondOffset));
  __ CompareRoot(result, Heap::kempty_stringRootIndex);
  __ b(ne, call_runtime);
  // Get the first of the two strings and load its instance type.
  __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset));

  __ bind(&indirect_string_loaded);
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // Distinguish sequential and external strings. Only these two string
  // representations can reach here (slices and flat cons strings have been
  // reduced to the underlying sequential or external string).
  Label external_string, check_encoding;
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ tst(result, Operand(kStringRepresentationMask));
  __ b(ne, &external_string);

  // Prepare sequential strings
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ add(string,
         string,
         Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ jmp(&check_encoding);

  // Handle external strings.
  __ bind(&external_string);
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ tst(result, Operand(kIsIndirectStringMask));
    __ Assert(eq, kExternalStringExpectedButNotFound);
  }
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ tst(result, Operand(kShortExternalStringMask));
  __ b(ne, call_runtime);
  __ ldr(string, FieldMemOperand(string, ExternalString::kResourceDataOffset));

  Label one_byte, done;
  __ bind(&check_encoding);
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ tst(result, Operand(kStringEncodingMask));
  __ b(ne, &one_byte);
  // Two-byte string.
  __ ldrh(result, MemOperand(string, index, LSL, 1));
  __ jmp(&done);
  __ bind(&one_byte);
  // One-byte string.
  __ ldrb(result, MemOperand(string, index));
  __ bind(&done);
}

#undef __

#ifdef DEBUG
// add(r0, pc, Operand(-8))
static const uint32_t kCodeAgePatchFirstInstruction = 0xe24f0008;
#endif

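// Code aging works by patching a function's prologue: young code carries the
// standard frame-setup sequence generated by CodeAgingHelper below, while
// PatchPlatformCodeAge replaces that sequence with a pc-relative jump into a
// code-age stub (add r0, pc, #-8; ldr pc, [pc, #-4]; <stub address>). The
// first instruction of that patch, kCodeAgePatchFirstInstruction, is how
// IsOld() recognizes aged code.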
CodeAgingHelper::CodeAgingHelper(Isolate* isolate) {
  USE(isolate);
  DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
  // Since patcher is a large object, allocate it dynamically when needed,
  // to avoid overloading the stack in stress conditions.
  // DONT_FLUSH is used because the CodeAgingHelper is initialized early in
  // the process, before ARM simulator ICache is setup.
  base::SmartPointer<CodePatcher> patcher(
      new CodePatcher(isolate, young_sequence_.start(),
                      young_sequence_.length() / Assembler::kInstrSize,
                      CodePatcher::DONT_FLUSH));
  PredictableCodeSizeScope scope(patcher->masm(), young_sequence_.length());
  patcher->masm()->PushStandardFrame(r1);
  patcher->masm()->nop(ip.code());
}


#ifdef DEBUG
bool CodeAgingHelper::IsOld(byte* candidate) const {
  return Memory::uint32_at(candidate) == kCodeAgePatchFirstInstruction;
}
#endif


bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
  bool result = isolate->code_aging_helper()->IsYoung(sequence);
  DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
  return result;
}


void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age,
                               MarkingParity* parity) {
  if (IsYoungSequence(isolate, sequence)) {
    *age = kNoAgeCodeAge;
    *parity = NO_MARKING_PARITY;
  } else {
    Address target_address = Memory::Address_at(
        sequence + (kNoCodeAgeSequenceLength - Assembler::kInstrSize));
    Code* stub = GetCodeFromTargetAddress(target_address);
    GetCodeAgeAndParity(stub, age, parity);
  }
}


void Code::PatchPlatformCodeAge(Isolate* isolate,
                                byte* sequence,
                                Code::Age age,
                                MarkingParity parity) {
  uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
  if (age == kNoAgeCodeAge) {
    isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
    Assembler::FlushICache(isolate, sequence, young_length);
  } else {
    Code* stub = GetCodeAgeStub(isolate, age, parity);
    CodePatcher patcher(isolate, sequence,
                        young_length / Assembler::kInstrSize);
    patcher.masm()->add(r0, pc, Operand(-8));
    patcher.masm()->ldr(pc, MemOperand(pc, -4));
    patcher.masm()->emit_code_stub_address(stub);
  }
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM