// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/ppc/codegen-ppc.h"

#if V8_TARGET_ARCH_PPC

#include "src/codegen.h"
#include "src/macro-assembler.h"
#include "src/ppc/simulator-ppc.h"

namespace v8 {
namespace internal {


#define __ masm.


#if defined(USE_SIMULATOR)
byte* fast_exp_ppc_machine_code = nullptr;
double fast_exp_simulator(double x, Isolate* isolate) {
  return Simulator::current(isolate)
      ->CallFPReturnsDouble(fast_exp_ppc_machine_code, x, 0);
}
#endif

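// CreateExpFunction() assembles a free-standing Math.exp stub into a page of
// OS-allocated memory so it can be invoked like a plain C function. Under
// USE_SIMULATOR the buffer cannot be called directly, so fast_exp_simulator()
// above routes the call through the simulator instead.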
UnaryMathFunctionWithIsolate CreateExpFunction(Isolate* isolate) {
  size_t actual_size;
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;
  ExternalReference::InitializeMathExpData();

  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);

  {
    DoubleRegister input = d1;
    DoubleRegister result = d2;
    DoubleRegister double_scratch1 = d3;
    DoubleRegister double_scratch2 = d4;
    Register temp1 = r7;
    Register temp2 = r8;
    Register temp3 = r9;

    // Called from C
    __ function_descriptor();

    __ Push(temp3, temp2, temp1);
    MathExpGenerator::EmitMathExp(&masm, input, result, double_scratch1,
                                  double_scratch2, temp1, temp2, temp3);
    __ Pop(temp3, temp2, temp1);
    __ fmr(d1, result);
    __ Ret();
  }

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(ABI_USES_FUNCTION_DESCRIPTORS || !RelocInfo::RequiresRelocation(desc));

  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);

#if !defined(USE_SIMULATOR)
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
#else
  fast_exp_ppc_machine_code = buffer;
  return &fast_exp_simulator;
#endif
}

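// Generates a stub that computes sqrt with a single fsqrt instruction. On the
// simulator there is no fast path; returning nullptr signals the caller to
// use the portable fallback.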
UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
#if defined(USE_SIMULATOR)
  return nullptr;
#else
  size_t actual_size;
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;

  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);

  // Called from C
  __ function_descriptor();

  __ MovFromFloatParameter(d1);
  __ fsqrt(d1, d1);
  __ MovToFloatResult(d1);
  __ Ret();

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(ABI_USES_FUNCTION_DESCRIPTORS || !RelocInfo::RequiresRelocation(desc));

  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
#endif
}

#undef __


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

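// BeforeCall/AfterCall bracket a runtime call made from stub code that has no
// frame of its own. The has_frame flag lets the MacroAssembler assert that
// frame-dependent operations only execute while a frame exists.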
void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterFrame(StackFrame::INTERNAL);
  DCHECK(!masm->has_frame());
  masm->set_has_frame(true);
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveFrame(StackFrame::INTERNAL);
  DCHECK(masm->has_frame());
  masm->set_has_frame(false);
}


// -------------------------------------------------------------------------
// Code generators

#define __ ACCESS_MASM(masm)

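// Every JSObject's map records the kind of its elements backing store
// (Smi-only, double, or tagged object). The generators below perform the
// transitions: the first one only swaps the map, the other two also convert
// the backing store element by element.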
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
    MacroAssembler* masm, Register receiver, Register key, Register value,
    Register target_map, AllocationSiteMode mode,
    Label* allocation_memento_found) {
  Register scratch_elements = r7;
  DCHECK(!AreAliased(receiver, key, value, target_map, scratch_elements));

  if (mode == TRACK_ALLOCATION_SITE) {
    DCHECK(allocation_memento_found != NULL);
    __ JumpIfJSArrayHasAllocationMemento(receiver, scratch_elements, r11,
                                         allocation_memento_found);
  }

  // Set transitioned map.
  __ StoreP(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset), r0);
  __ RecordWriteField(receiver, HeapObject::kMapOffset, target_map, r11,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}

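// GenerateSmiToDouble allocates a FixedDoubleArray of the same length and
// copies the elements across, untagging each Smi to a double and rewriting
// holes as the hole NaN. If allocation fails, control branches to |fail|
// before the receiver has been modified.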
void ElementsTransitionGenerator::GenerateSmiToDouble(
    MacroAssembler* masm, Register receiver, Register key, Register value,
    Register target_map, AllocationSiteMode mode, Label* fail) {
  // lr contains the return address
  Label loop, entry, convert_hole, only_change_map, done;
  Register elements = r7;
  Register length = r8;
  Register array = r9;
  Register array_end = array;

  // target_map parameter can be clobbered.
  Register scratch1 = target_map;
  Register scratch2 = r10;
  Register scratch3 = r11;
  Register scratch4 = r14;

  // Verify input registers don't conflict with locals.
  DCHECK(!AreAliased(receiver, key, value, target_map, elements, length, array,
                     scratch2));

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(receiver, elements, scratch3, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ LoadP(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex);
  __ beq(&only_change_map);

  __ LoadP(length, FieldMemOperand(elements, FixedArray::kLengthOffset));
  // length: number of elements (smi-tagged)

  // Allocate new FixedDoubleArray.
  __ SmiToDoubleArrayOffset(scratch3, length);
  __ addi(scratch3, scratch3, Operand(FixedDoubleArray::kHeaderSize));
  __ Allocate(scratch3, array, scratch4, scratch2, fail, DOUBLE_ALIGNMENT);
  __ subi(array, array, Operand(kHeapObjectTag));
  // array: destination FixedDoubleArray, not tagged as heap object.
  // elements: source FixedArray.

  // Set destination FixedDoubleArray's length and map.
  __ LoadRoot(scratch2, Heap::kFixedDoubleArrayMapRootIndex);
  __ StoreP(length, MemOperand(array, FixedDoubleArray::kLengthOffset));
  __ StoreP(scratch2, MemOperand(array, HeapObject::kMapOffset));

  // Update receiver's map.
  __ StoreP(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset), r0);
  __ RecordWriteField(receiver, HeapObject::kMapOffset, target_map, scratch2,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // Replace receiver's backing store with newly created FixedDoubleArray.
  __ addi(scratch1, array, Operand(kHeapObjectTag));
  __ StoreP(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset), r0);
  __ RecordWriteField(receiver, JSObject::kElementsOffset, scratch1, scratch2,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Prepare for conversion loop.
  __ addi(scratch1, elements,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ addi(scratch2, array, Operand(FixedDoubleArray::kHeaderSize));
  __ SmiToDoubleArrayOffset(array_end, length);
  __ add(array_end, scratch2, array_end);
  // Repurpose registers no longer in use.
#if V8_TARGET_ARCH_PPC64
  Register hole_int64 = elements;
  __ mov(hole_int64, Operand(kHoleNanInt64));
#else
  Register hole_lower = elements;
  Register hole_upper = length;
  __ mov(hole_lower, Operand(kHoleNanLower32));
  __ mov(hole_upper, Operand(kHoleNanUpper32));
#endif
  // scratch1: begin of source FixedArray element fields, not tagged
  // hole_lower: kHoleNanLower32 (on PPC64: hole_int64 holds kHoleNanInt64)
  // hole_upper: kHoleNanUpper32
  // array_end: end of destination FixedDoubleArray, not tagged
  // scratch2: begin of FixedDoubleArray element fields, not tagged

  __ b(&entry);

  __ bind(&only_change_map);
  __ StoreP(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset), r0);
  __ RecordWriteField(receiver, HeapObject::kMapOffset, target_map, scratch2,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ b(&done);

  // Convert and copy elements.
  __ bind(&loop);
  __ LoadP(scratch3, MemOperand(scratch1));
  __ addi(scratch1, scratch1, Operand(kPointerSize));
  // scratch3: current element
  __ UntagAndJumpIfNotSmi(scratch3, scratch3, &convert_hole);

  // Normal smi, convert to double and store.
  __ ConvertIntToDouble(scratch3, d0);
  __ stfd(d0, MemOperand(scratch2, 0));
  __ addi(scratch2, scratch2, Operand(8));
  __ b(&entry);

  // Hole found, store the-hole NaN.
  __ bind(&convert_hole);
  if (FLAG_debug_code) {
    __ LoadP(scratch3, MemOperand(scratch1, -kPointerSize));
    __ CompareRoot(scratch3, Heap::kTheHoleValueRootIndex);
    __ Assert(eq, kObjectFoundInSmiOnlyArray);
  }
#if V8_TARGET_ARCH_PPC64
  __ std(hole_int64, MemOperand(scratch2, 0));
#else
  __ stw(hole_upper, MemOperand(scratch2, Register::kExponentOffset));
  __ stw(hole_lower, MemOperand(scratch2, Register::kMantissaOffset));
#endif
  __ addi(scratch2, scratch2, Operand(8));

  __ bind(&entry);
  __ cmp(scratch2, array_end);
  __ blt(&loop);

  __ bind(&done);
}

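// The reverse transition: each unboxed double becomes a freshly allocated
// HeapNumber. Since those allocations can trigger a GC, the new FixedArray
// is pessimistically pre-filled with the-hole before conversion starts.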
void ElementsTransitionGenerator::GenerateDoubleToObject(
    MacroAssembler* masm, Register receiver, Register key, Register value,
    Register target_map, AllocationSiteMode mode, Label* fail) {
  // Register lr contains the return address.
  Label loop, convert_hole, gc_required, only_change_map;
  Register elements = r7;
  Register array = r9;
  Register length = r8;
  Register scratch = r10;
  Register scratch3 = r11;
  Register hole_value = r14;

  // Verify input registers don't conflict with locals.
  DCHECK(!AreAliased(receiver, key, value, target_map, elements, array, length,
                     scratch));

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(receiver, elements, scratch3, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ LoadP(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex);
  __ beq(&only_change_map);

  __ Push(target_map, receiver, key, value);
  __ LoadP(length, FieldMemOperand(elements, FixedArray::kLengthOffset));
  // elements: source FixedDoubleArray
  // length: number of elements (smi-tagged)

  // Allocate new FixedArray.
  // Re-use value and target_map registers, as they have been saved on the
  // stack.
  Register array_size = value;
  Register allocate_scratch = target_map;
  __ li(array_size, Operand(FixedDoubleArray::kHeaderSize));
  __ SmiToPtrArrayOffset(r0, length);
  __ add(array_size, array_size, r0);
  __ Allocate(array_size, array, allocate_scratch, scratch, &gc_required,
              NO_ALLOCATION_FLAGS);
  // array: destination FixedArray, tagged as heap object
  // Set destination FixedArray's length and map.
  __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
  __ StoreP(length, FieldMemOperand(array, FixedDoubleArray::kLengthOffset),
            r0);
  __ StoreP(scratch, FieldMemOperand(array, HeapObject::kMapOffset), r0);

  // Prepare for conversion loop.
  Register src_elements = elements;
  Register dst_elements = target_map;
  Register dst_end = length;
  Register heap_number_map = scratch;
  __ addi(src_elements, elements,
          Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
  __ SmiToPtrArrayOffset(length, length);
  __ LoadRoot(hole_value, Heap::kTheHoleValueRootIndex);

  Label initialization_loop, loop_done;
  __ ShiftRightImm(r0, length, Operand(kPointerSizeLog2), SetRC);
  __ beq(&loop_done, cr0);

  // Allocating heap numbers in the loop below can fail and cause a jump to
  // gc_required. We can't leave a partly initialized FixedArray behind,
  // so pessimistically fill it with holes now.
  __ mtctr(r0);
  __ addi(dst_elements, array,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
  __ bind(&initialization_loop);
  __ StorePU(hole_value, MemOperand(dst_elements, kPointerSize));
  __ bdnz(&initialization_loop);

  __ addi(dst_elements, array,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(dst_end, dst_elements, length);
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  // Use offset addresses in src_elements to take full advantage of
  // post-indexing.
  // dst_elements: begin of destination FixedArray element fields, not tagged
  // src_elements: begin of source FixedDoubleArray element fields,
  //               not tagged, +4
  // dst_end: end of destination FixedArray, not tagged
  // array: destination FixedArray
  // hole_value: the-hole pointer
  // heap_number_map: heap number map
  __ b(&loop);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  __ Pop(target_map, receiver, key, value);
  __ b(fail);

  // Replace the-hole NaN with the-hole pointer.
  __ bind(&convert_hole);
  __ StoreP(hole_value, MemOperand(dst_elements));
  __ addi(dst_elements, dst_elements, Operand(kPointerSize));
  __ cmpl(dst_elements, dst_end);
  __ bge(&loop_done);

  __ bind(&loop);
  Register upper_bits = key;
  __ lwz(upper_bits, MemOperand(src_elements, Register::kExponentOffset));
  __ addi(src_elements, src_elements, Operand(kDoubleSize));
  // upper_bits: current element's upper 32 bit
  // src_elements: address of next element's upper 32 bit
  __ Cmpi(upper_bits, Operand(kHoleNanUpper32), r0);
  __ beq(&convert_hole);

  // Non-hole double, copy value into a heap number.
  Register heap_number = receiver;
  Register scratch2 = value;
  __ AllocateHeapNumber(heap_number, scratch2, scratch3, heap_number_map,
                        &gc_required);
  // heap_number: new heap number
#if V8_TARGET_ARCH_PPC64
  __ ld(scratch2, MemOperand(src_elements, -kDoubleSize));
  // subtract tag for std
  __ addi(upper_bits, heap_number, Operand(-kHeapObjectTag));
  __ std(scratch2, MemOperand(upper_bits, HeapNumber::kValueOffset));
#else
  __ lwz(scratch2,
         MemOperand(src_elements, Register::kMantissaOffset - kDoubleSize));
  __ lwz(upper_bits,
         MemOperand(src_elements, Register::kExponentOffset - kDoubleSize));
  __ stw(scratch2, FieldMemOperand(heap_number, HeapNumber::kMantissaOffset));
  __ stw(upper_bits, FieldMemOperand(heap_number, HeapNumber::kExponentOffset));
#endif
  __ mr(scratch2, dst_elements);
  __ StoreP(heap_number, MemOperand(dst_elements));
  __ addi(dst_elements, dst_elements, Operand(kPointerSize));
  __ RecordWrite(array, scratch2, heap_number, kLRHasNotBeenSaved,
                 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ cmpl(dst_elements, dst_end);
  __ blt(&loop);
  __ bind(&loop_done);

  __ Pop(target_map, receiver, key, value);
  // Replace receiver's backing store with newly created and filled FixedArray.
  __ StoreP(array, FieldMemOperand(receiver, JSObject::kElementsOffset), r0);
  __ RecordWriteField(receiver, JSObject::kElementsOffset, array, scratch,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  __ bind(&only_change_map);
  // Update receiver's map.
  __ StoreP(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset), r0);
  __ RecordWriteField(receiver, HeapObject::kMapOffset, target_map, scratch,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}

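// Loads the character at |index| out of any string representation: slices
// and flat cons strings are first resolved to their underlying sequential or
// external string, then the load dispatches on the encoding (one- or
// two-byte). Anything more exotic is punted to |call_runtime|.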
// Assumes ip can be used as a scratch register below.
void StringCharLoadGenerator::Generate(MacroAssembler* masm, Register string,
                                       Register index, Register result,
                                       Label* call_runtime) {
  // Fetch the instance type of the receiver into result register.
  __ LoadP(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ lbz(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ andi(r0, result, Operand(kIsIndirectStringMask));
  __ beq(&check_sequential, cr0);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  __ mov(ip, Operand(kSlicedNotConsMask));
  __ and_(r0, result, ip, SetRC);
  __ beq(&cons_string, cr0);

  // Handle slices.
  Label indirect_string_loaded;
  __ LoadP(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
  __ LoadP(string, FieldMemOperand(string, SlicedString::kParentOffset));
  __ SmiUntag(ip, result);
  __ add(index, index, ip);
  __ b(&indirect_string_loaded);

  // Handle cons strings.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ LoadP(result, FieldMemOperand(string, ConsString::kSecondOffset));
  __ CompareRoot(result, Heap::kempty_stringRootIndex);
  __ bne(call_runtime);
  // Get the first of the two strings and load its instance type.
  __ LoadP(string, FieldMemOperand(string, ConsString::kFirstOffset));

  __ bind(&indirect_string_loaded);
  __ LoadP(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ lbz(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // Distinguish sequential and external strings. Only these two string
  // representations can reach here (slices and flat cons strings have been
  // reduced to the underlying sequential or external string).
  Label external_string, check_encoding;
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ andi(r0, result, Operand(kStringRepresentationMask));
  __ bne(&external_string, cr0);

  // Prepare sequential strings
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ addi(string, string,
          Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ b(&check_encoding);

  // Handle external strings.
  __ bind(&external_string);
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ andi(r0, result, Operand(kIsIndirectStringMask));
    __ Assert(eq, kExternalStringExpectedButNotFound, cr0);
  }
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ andi(r0, result, Operand(kShortExternalStringMask));
  __ bne(call_runtime, cr0);
  __ LoadP(string,
           FieldMemOperand(string, ExternalString::kResourceDataOffset));

  Label one_byte, done;
  __ bind(&check_encoding);
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ andi(r0, result, Operand(kStringEncodingMask));
  __ bne(&one_byte, cr0);
  // Two-byte string.
  __ ShiftLeftImm(result, index, Operand(1));
  __ lhzx(result, MemOperand(string, result));
  __ b(&done);
  __ bind(&one_byte);
  // One-byte string.
  __ lbzx(result, MemOperand(string, index));
  __ bind(&done);
}


static MemOperand ExpConstant(int index, Register base) {
  return MemOperand(base, index * kDoubleSize);
}

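// EmitMathExp computes e^x by table lookup: after range checks against the
// constants at math_exp_constants (underflow yields 0.0, overflow yields the
// infinity constant), the input is decomposed so that a short polynomial
// handles the fractional part while an entry from the 2048-element
// math_exp_log_table, combined with a computed exponent, supplies the power
// of two. The meaning of each ExpConstant slot is fixed where the tables are
// built, in ExternalReference::InitializeMathExpData().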
void MathExpGenerator::EmitMathExp(MacroAssembler* masm, DoubleRegister input,
                                   DoubleRegister result,
                                   DoubleRegister double_scratch1,
                                   DoubleRegister double_scratch2,
                                   Register temp1, Register temp2,
                                   Register temp3) {
  DCHECK(!input.is(result));
  DCHECK(!input.is(double_scratch1));
  DCHECK(!input.is(double_scratch2));
  DCHECK(!result.is(double_scratch1));
  DCHECK(!result.is(double_scratch2));
  DCHECK(!double_scratch1.is(double_scratch2));
  DCHECK(!temp1.is(temp2));
  DCHECK(!temp1.is(temp3));
  DCHECK(!temp2.is(temp3));
  DCHECK(ExternalReference::math_exp_constants(0).address() != NULL);
  DCHECK(!masm->serializer_enabled());  // External references not serializable.

  Label zero, infinity, done;

  __ mov(temp3, Operand(ExternalReference::math_exp_constants(0)));

  __ lfd(double_scratch1, ExpConstant(0, temp3));
  __ fcmpu(double_scratch1, input);
  __ fmr(result, input);
  __ bunordered(&done);
  __ bge(&zero);

  __ lfd(double_scratch2, ExpConstant(1, temp3));
  __ fcmpu(input, double_scratch2);
  __ bge(&infinity);

  __ lfd(double_scratch1, ExpConstant(3, temp3));
  __ lfd(result, ExpConstant(4, temp3));
  __ fmul(double_scratch1, double_scratch1, input);
  __ fadd(double_scratch1, double_scratch1, result);
  __ MovDoubleLowToInt(temp2, double_scratch1);
  __ fsub(double_scratch1, double_scratch1, result);
  __ lfd(result, ExpConstant(6, temp3));
  __ lfd(double_scratch2, ExpConstant(5, temp3));
  __ fmul(double_scratch1, double_scratch1, double_scratch2);
  __ fsub(double_scratch1, double_scratch1, input);
  __ fsub(result, result, double_scratch1);
  __ fmul(double_scratch2, double_scratch1, double_scratch1);
  __ fmul(result, result, double_scratch2);
  __ lfd(double_scratch2, ExpConstant(7, temp3));
  __ fmul(result, result, double_scratch2);
  __ fsub(result, result, double_scratch1);
  __ lfd(double_scratch2, ExpConstant(8, temp3));
  __ fadd(result, result, double_scratch2);
  __ srwi(temp1, temp2, Operand(11));
  __ andi(temp2, temp2, Operand(0x7ff));
  __ addi(temp1, temp1, Operand(0x3ff));

  // Must not call ExpConstant() after overwriting temp3!
  __ mov(temp3, Operand(ExternalReference::math_exp_log_table()));
  __ slwi(temp2, temp2, Operand(3));
#if V8_TARGET_ARCH_PPC64
  __ ldx(temp2, MemOperand(temp3, temp2));
  __ sldi(temp1, temp1, Operand(52));
  __ orx(temp2, temp1, temp2);
  __ MovInt64ToDouble(double_scratch1, temp2);
#else
  __ add(ip, temp3, temp2);
  __ lwz(temp3, MemOperand(ip, Register::kExponentOffset));
  __ lwz(temp2, MemOperand(ip, Register::kMantissaOffset));
  __ slwi(temp1, temp1, Operand(20));
  __ orx(temp3, temp1, temp3);
  __ MovInt64ToDouble(double_scratch1, temp3, temp2);
#endif

  __ fmul(result, result, double_scratch1);
  __ b(&done);

  __ bind(&zero);
  __ fmr(result, kDoubleRegZero);
  __ b(&done);

  __ bind(&infinity);
  __ lfd(result, ExpConstant(2, temp3));

  __ bind(&done);
}

#undef __

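// Code aging. Freshly compiled ("young") code begins with the standard
// frame-building prologue emitted below; to age it, PatchPlatformCodeAge
// overwrites that prologue with a jump to a code-age stub. IsYoungSequence
// reports which of the two states a given prologue is in.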
CodeAgingHelper::CodeAgingHelper(Isolate* isolate) {
  USE(isolate);
  DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
  // Since patcher is a large object, allocate it dynamically when needed,
  // to avoid overloading the stack in stress conditions.
  // DONT_FLUSH is used because the CodeAgingHelper is initialized early in
  // the process, before the simulator ICache is set up.
  base::SmartPointer<CodePatcher> patcher(
      new CodePatcher(isolate, young_sequence_.start(),
                      young_sequence_.length() / Assembler::kInstrSize,
                      CodePatcher::DONT_FLUSH));
  PredictableCodeSizeScope scope(patcher->masm(), young_sequence_.length());
  patcher->masm()->PushStandardFrame(r4);
  for (int i = 0; i < kNoCodeAgeSequenceNops; i++) {
    patcher->masm()->nop();
  }
}


#ifdef DEBUG
bool CodeAgingHelper::IsOld(byte* candidate) const {
  return Assembler::IsNop(Assembler::instr_at(candidate));
}
#endif


bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
  bool result = isolate->code_aging_helper()->IsYoung(sequence);
  DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
  return result;
}


void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age,
                               MarkingParity* parity) {
  if (IsYoungSequence(isolate, sequence)) {
    *age = kNoAgeCodeAge;
    *parity = NO_MARKING_PARITY;
  } else {
    Code* code = NULL;
    Address target_address =
        Assembler::target_address_at(sequence + kCodeAgingTargetDelta, code);
    Code* stub = GetCodeFromTargetAddress(target_address);
    GetCodeAgeAndParity(stub, age, parity);
  }
}

void Code::PatchPlatformCodeAge(Isolate* isolate, byte* sequence, Code::Age age,
                                MarkingParity parity) {
  uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
  if (age == kNoAgeCodeAge) {
    isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
    Assembler::FlushICache(isolate, sequence, young_length);
  } else {
    // FIXED_SEQUENCE
    Code* stub = GetCodeAgeStub(isolate, age, parity);
    CodePatcher patcher(isolate, sequence,
                        young_length / Assembler::kInstrSize);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(patcher.masm());
    intptr_t target = reinterpret_cast<intptr_t>(stub->instruction_start());
    // Don't use Call -- we need to preserve ip and lr.
    // See GenerateMakeCodeYoungAgainCommon for the stub code.
    patcher.masm()->nop();  // marker to detect sequence (see IsOld)
    patcher.masm()->mov(r3, Operand(target));
    patcher.masm()->Jump(r3);
    for (int i = 0; i < kCodeAgingSequenceNops; i++) {
      patcher.masm()->nop();
    }
  }
}
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC