// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/ppc/codegen-ppc.h"

#if V8_TARGET_ARCH_PPC

#include "src/codegen.h"
#include "src/macro-assembler.h"
#include "src/ppc/simulator-ppc.h"

namespace v8 {
namespace internal {


#define __ masm.


#if defined(USE_SIMULATOR)
byte* fast_exp_ppc_machine_code = nullptr;
double fast_exp_simulator(double x, Isolate* isolate) {
  return Simulator::current(isolate)
      ->CallFPReturnsDouble(fast_exp_ppc_machine_code, x, 0);
}
#endif

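// Builds a standalone stub that evaluates the exponential function for a
// double argument via MathExpGenerator::EmitMathExp. The code is emitted into
// a freshly allocated buffer, flushed from the instruction cache, and
// write-protected before being returned as a C-callable function pointer; on
// simulator builds the buffer is instead dispatched through
// fast_exp_simulator().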
UnaryMathFunctionWithIsolate CreateExpFunction(Isolate* isolate) {
  size_t actual_size;
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;
  ExternalReference::InitializeMathExpData();

  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);

  {
    DoubleRegister input = d1;
    DoubleRegister result = d2;
    DoubleRegister double_scratch1 = d3;
    DoubleRegister double_scratch2 = d4;
    Register temp1 = r7;
    Register temp2 = r8;
    Register temp3 = r9;

// Called from C
    __ function_descriptor();

    __ Push(temp3, temp2, temp1);
    MathExpGenerator::EmitMathExp(&masm, input, result, double_scratch1,
                                  double_scratch2, temp1, temp2, temp3);
    __ Pop(temp3, temp2, temp1);
    __ fmr(d1, result);
    __ Ret();
  }

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(ABI_USES_FUNCTION_DESCRIPTORS || !RelocInfo::RequiresRelocation(desc));

  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);

#if !defined(USE_SIMULATOR)
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
#else
  fast_exp_ppc_machine_code = buffer;
  return &fast_exp_simulator;
#endif
}

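// Builds a minimal square-root stub around the fsqrt instruction. Simulator
// builds return nullptr, so callers presumably fall back to the default
// implementation.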
UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
#if defined(USE_SIMULATOR)
  return nullptr;
#else
  size_t actual_size;
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;

  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);

// Called from C
  __ function_descriptor();

  __ MovFromFloatParameter(d1);
  __ fsqrt(d1, d1);
  __ MovToFloatResult(d1);
  __ Ret();

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(ABI_USES_FUNCTION_DESCRIPTORS || !RelocInfo::RequiresRelocation(desc));

  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
#endif
}

#undef __


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterFrame(StackFrame::INTERNAL);
  DCHECK(!masm->has_frame());
  masm->set_has_frame(true);
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveFrame(StackFrame::INTERNAL);
  DCHECK(masm->has_frame());
  masm->set_has_frame(false);
}


// -------------------------------------------------------------------------
// Code generators

#define __ ACCESS_MASM(masm)

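// Elements transition that needs no backing-store changes: only the receiver's
// map is replaced (with a write barrier). If allocation-site tracking is
// enabled, arrays carrying an allocation memento bail out first.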
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
    MacroAssembler* masm, Register receiver, Register key, Register value,
    Register target_map, AllocationSiteMode mode,
    Label* allocation_memento_found) {
  Register scratch_elements = r7;
  DCHECK(!AreAliased(receiver, key, value, target_map, scratch_elements));

  if (mode == TRACK_ALLOCATION_SITE) {
    DCHECK(allocation_memento_found != NULL);
    __ JumpIfJSArrayHasAllocationMemento(receiver, scratch_elements,
                                         allocation_memento_found);
  }

  // Set transitioned map.
  __ StoreP(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset), r0);
  __ RecordWriteField(receiver, HeapObject::kMapOffset, target_map, r11,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}

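// Transitions a FixedArray of smis into a newly allocated FixedDoubleArray.
// Each smi is untagged and converted to a double in the new backing store;
// the-hole entries become the hole NaN bit pattern. Empty arrays take the
// only_change_map shortcut, and allocation failure jumps to fail.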
void ElementsTransitionGenerator::GenerateSmiToDouble(
    MacroAssembler* masm, Register receiver, Register key, Register value,
    Register target_map, AllocationSiteMode mode, Label* fail) {
  // lr contains the return address
  Label loop, entry, convert_hole, only_change_map, done;
  Register elements = r7;
  Register length = r8;
  Register array = r9;
  Register array_end = array;

  // target_map parameter can be clobbered.
  Register scratch1 = target_map;
  Register scratch2 = r10;
  Register scratch3 = r11;
  Register scratch4 = r14;

  // Verify input registers don't conflict with locals.
  DCHECK(!AreAliased(receiver, key, value, target_map, elements, length, array,
                     scratch2));

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(receiver, elements, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ LoadP(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex);
  __ beq(&only_change_map);

  __ LoadP(length, FieldMemOperand(elements, FixedArray::kLengthOffset));
  // length: number of elements (smi-tagged)

  // Allocate new FixedDoubleArray.
  __ SmiToDoubleArrayOffset(scratch3, length);
  __ addi(scratch3, scratch3, Operand(FixedDoubleArray::kHeaderSize));
  __ Allocate(scratch3, array, scratch4, scratch2, fail, DOUBLE_ALIGNMENT);
  // array: destination FixedDoubleArray, not tagged as heap object.
  // elements: source FixedArray.

  // Set destination FixedDoubleArray's length and map.
  __ LoadRoot(scratch2, Heap::kFixedDoubleArrayMapRootIndex);
  __ StoreP(length, MemOperand(array, FixedDoubleArray::kLengthOffset));
  // Update receiver's map.
  __ StoreP(scratch2, MemOperand(array, HeapObject::kMapOffset));

  __ StoreP(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset), r0);
  __ RecordWriteField(receiver, HeapObject::kMapOffset, target_map, scratch2,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // Replace receiver's backing store with newly created FixedDoubleArray.
  __ addi(scratch1, array, Operand(kHeapObjectTag));
  __ StoreP(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset), r0);
  __ RecordWriteField(receiver, JSObject::kElementsOffset, scratch1, scratch2,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Prepare for conversion loop.
  __ addi(scratch1, elements,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ addi(scratch2, array, Operand(FixedDoubleArray::kHeaderSize));
  __ SmiToDoubleArrayOffset(array_end, length);
  __ add(array_end, scratch2, array_end);
// Repurpose registers no longer in use.
#if V8_TARGET_ARCH_PPC64
  Register hole_int64 = elements;
  __ mov(hole_int64, Operand(kHoleNanInt64));
#else
  Register hole_lower = elements;
  Register hole_upper = length;
  __ mov(hole_lower, Operand(kHoleNanLower32));
  __ mov(hole_upper, Operand(kHoleNanUpper32));
#endif
  // scratch1: begin of source FixedArray element fields, not tagged
  // hole_lower: kHoleNanLower32 OR hole_int64
  // hole_upper: kHoleNanUpper32
  // array_end: end of destination FixedDoubleArray, not tagged
  // scratch2: begin of FixedDoubleArray element fields, not tagged

  __ b(&entry);

  __ bind(&only_change_map);
  __ StoreP(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset), r0);
  __ RecordWriteField(receiver, HeapObject::kMapOffset, target_map, scratch2,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ b(&done);

  // Convert and copy elements.
  __ bind(&loop);
  __ LoadP(scratch3, MemOperand(scratch1));
  __ addi(scratch1, scratch1, Operand(kPointerSize));
  // scratch3: current element
  __ UntagAndJumpIfNotSmi(scratch3, scratch3, &convert_hole);

  // Normal smi, convert to double and store.
  __ ConvertIntToDouble(scratch3, d0);
  __ stfd(d0, MemOperand(scratch2, 0));
  __ addi(scratch2, scratch2, Operand(8));
  __ b(&entry);

  // Hole found, store the-hole NaN.
  __ bind(&convert_hole);
  if (FLAG_debug_code) {
    __ LoadP(scratch3, MemOperand(scratch1, -kPointerSize));
    __ CompareRoot(scratch3, Heap::kTheHoleValueRootIndex);
    __ Assert(eq, kObjectFoundInSmiOnlyArray);
  }
#if V8_TARGET_ARCH_PPC64
  __ std(hole_int64, MemOperand(scratch2, 0));
#else
  __ stw(hole_upper, MemOperand(scratch2, Register::kExponentOffset));
  __ stw(hole_lower, MemOperand(scratch2, Register::kMantissaOffset));
#endif
  __ addi(scratch2, scratch2, Operand(8));

  __ bind(&entry);
  __ cmp(scratch2, array_end);
  __ blt(&loop);

  __ bind(&done);
}

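// Transitions a FixedDoubleArray into a FixedArray of tagged values. The new
// array is pre-filled with the-hole so a GC triggered while boxing doubles
// never sees uninitialized slots; each non-hole double is then copied into a
// freshly allocated HeapNumber and recorded with the write barrier.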
void ElementsTransitionGenerator::GenerateDoubleToObject(
    MacroAssembler* masm, Register receiver, Register key, Register value,
    Register target_map, AllocationSiteMode mode, Label* fail) {
  // Register lr contains the return address.
  Label loop, convert_hole, gc_required, only_change_map;
  Register elements = r7;
  Register array = r9;
  Register length = r8;
  Register scratch = r10;
  Register scratch3 = r11;
  Register hole_value = r14;

  // Verify input registers don't conflict with locals.
  DCHECK(!AreAliased(receiver, key, value, target_map, elements, array, length,
                     scratch));

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(receiver, elements, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ LoadP(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex);
  __ beq(&only_change_map);

  __ Push(target_map, receiver, key, value);
  __ LoadP(length, FieldMemOperand(elements, FixedArray::kLengthOffset));
  // elements: source FixedDoubleArray
  // length: number of elements (smi-tagged)

  // Allocate new FixedArray.
  // Re-use value and target_map registers, as they have been saved on the
  // stack.
  Register array_size = value;
  Register allocate_scratch = target_map;
  __ li(array_size, Operand(FixedDoubleArray::kHeaderSize));
  __ SmiToPtrArrayOffset(r0, length);
  __ add(array_size, array_size, r0);
  __ Allocate(array_size, array, allocate_scratch, scratch, &gc_required,
              NO_ALLOCATION_FLAGS);
  // array: destination FixedArray, not tagged as heap object
  // Set destination FixedArray's length and map.
  __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
  __ StoreP(length, MemOperand(array, FixedDoubleArray::kLengthOffset));
  __ StoreP(scratch, MemOperand(array, HeapObject::kMapOffset));
  __ addi(array, array, Operand(kHeapObjectTag));

  // Prepare for conversion loop.
  Register src_elements = elements;
  Register dst_elements = target_map;
  Register dst_end = length;
  Register heap_number_map = scratch;
  __ addi(src_elements, elements,
          Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
  __ SmiToPtrArrayOffset(length, length);
  __ LoadRoot(hole_value, Heap::kTheHoleValueRootIndex);

  Label initialization_loop, loop_done;
  __ ShiftRightImm(r0, length, Operand(kPointerSizeLog2), SetRC);
  __ beq(&loop_done, cr0);

  // Allocating heap numbers in the loop below can fail and cause a jump to
  // gc_required. We can't leave a partly initialized FixedArray behind,
  // so pessimistically fill it with holes now.
  __ mtctr(r0);
  __ addi(dst_elements, array,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
  __ bind(&initialization_loop);
  __ StorePU(hole_value, MemOperand(dst_elements, kPointerSize));
  __ bdnz(&initialization_loop);

  __ addi(dst_elements, array,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(dst_end, dst_elements, length);
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  // Using offsetted addresses in src_elements to fully take advantage of
  // post-indexing.
  // dst_elements: begin of destination FixedArray element fields, not tagged
  // src_elements: begin of source FixedDoubleArray element fields,
  // not tagged, +4
  // dst_end: end of destination FixedArray, not tagged
  // array: destination FixedArray
  // hole_value: the-hole pointer
  // heap_number_map: heap number map
  __ b(&loop);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  __ Pop(target_map, receiver, key, value);
  __ b(fail);

  // Replace the-hole NaN with the-hole pointer.
  __ bind(&convert_hole);
  __ StoreP(hole_value, MemOperand(dst_elements));
  __ addi(dst_elements, dst_elements, Operand(kPointerSize));
  __ cmpl(dst_elements, dst_end);
  __ bge(&loop_done);

  __ bind(&loop);
  Register upper_bits = key;
  __ lwz(upper_bits, MemOperand(src_elements, Register::kExponentOffset));
  __ addi(src_elements, src_elements, Operand(kDoubleSize));
  // upper_bits: current element's upper 32 bits
  // src_elements: address of next element's upper 32 bits
  __ Cmpi(upper_bits, Operand(kHoleNanUpper32), r0);
  __ beq(&convert_hole);

  // Non-hole double, copy value into a heap number.
  Register heap_number = receiver;
  Register scratch2 = value;
  __ AllocateHeapNumber(heap_number, scratch2, scratch3, heap_number_map,
                        &gc_required);
  // heap_number: new heap number
#if V8_TARGET_ARCH_PPC64
  __ ld(scratch2, MemOperand(src_elements, -kDoubleSize));
  // subtract tag for std
  __ addi(upper_bits, heap_number, Operand(-kHeapObjectTag));
  __ std(scratch2, MemOperand(upper_bits, HeapNumber::kValueOffset));
#else
  __ lwz(scratch2,
         MemOperand(src_elements, Register::kMantissaOffset - kDoubleSize));
  __ lwz(upper_bits,
         MemOperand(src_elements, Register::kExponentOffset - kDoubleSize));
  __ stw(scratch2, FieldMemOperand(heap_number, HeapNumber::kMantissaOffset));
  __ stw(upper_bits, FieldMemOperand(heap_number, HeapNumber::kExponentOffset));
#endif
  __ mr(scratch2, dst_elements);
  __ StoreP(heap_number, MemOperand(dst_elements));
  __ addi(dst_elements, dst_elements, Operand(kPointerSize));
  __ RecordWrite(array, scratch2, heap_number, kLRHasNotBeenSaved,
                 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ cmpl(dst_elements, dst_end);
  __ blt(&loop);
  __ bind(&loop_done);

  __ Pop(target_map, receiver, key, value);
  // Replace receiver's backing store with newly created and filled FixedArray.
  __ StoreP(array, FieldMemOperand(receiver, JSObject::kElementsOffset), r0);
  __ RecordWriteField(receiver, JSObject::kElementsOffset, array, scratch,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  __ bind(&only_change_map);
  // Update receiver's map.
  __ StoreP(target_map, FieldMemOperand(receiver, HeapObject::kMapOffset), r0);
  __ RecordWriteField(receiver, HeapObject::kMapOffset, target_map, scratch,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}

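// Loads the character at |index| of |string| into |result|. Slices and flat
// cons strings are first resolved to their underlying sequential or external
// string; cons strings that need flattening and short external strings bail
// out to |call_runtime|.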
// Assume ip can be used as a scratch register below.
void StringCharLoadGenerator::Generate(MacroAssembler* masm, Register string,
                                       Register index, Register result,
                                       Label* call_runtime) {
  // Fetch the instance type of the receiver into result register.
  __ LoadP(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ lbz(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ andi(r0, result, Operand(kIsIndirectStringMask));
  __ beq(&check_sequential, cr0);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  __ mov(ip, Operand(kSlicedNotConsMask));
  __ and_(r0, result, ip, SetRC);
  __ beq(&cons_string, cr0);

  // Handle slices.
  Label indirect_string_loaded;
  __ LoadP(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
  __ LoadP(string, FieldMemOperand(string, SlicedString::kParentOffset));
  __ SmiUntag(ip, result);
  __ add(index, index, ip);
  __ b(&indirect_string_loaded);

  // Handle cons strings.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ LoadP(result, FieldMemOperand(string, ConsString::kSecondOffset));
  __ CompareRoot(result, Heap::kempty_stringRootIndex);
  __ bne(call_runtime);
  // Get the first of the two strings and load its instance type.
  __ LoadP(string, FieldMemOperand(string, ConsString::kFirstOffset));

  __ bind(&indirect_string_loaded);
  __ LoadP(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ lbz(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // Distinguish sequential and external strings. Only these two string
  // representations can reach here (slices and flat cons strings have been
  // reduced to the underlying sequential or external string).
  Label external_string, check_encoding;
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ andi(r0, result, Operand(kStringRepresentationMask));
  __ bne(&external_string, cr0);

  // Prepare sequential strings
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ addi(string, string,
          Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ b(&check_encoding);

  // Handle external strings.
  __ bind(&external_string);
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ andi(r0, result, Operand(kIsIndirectStringMask));
    __ Assert(eq, kExternalStringExpectedButNotFound, cr0);
  }
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ andi(r0, result, Operand(kShortExternalStringMask));
  __ bne(call_runtime, cr0);
  __ LoadP(string,
           FieldMemOperand(string, ExternalString::kResourceDataOffset));

  Label one_byte, done;
  __ bind(&check_encoding);
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ andi(r0, result, Operand(kStringEncodingMask));
  __ bne(&one_byte, cr0);
  // Two-byte string.
  __ ShiftLeftImm(result, index, Operand(1));
  __ lhzx(result, MemOperand(string, result));
  __ b(&done);
  __ bind(&one_byte);
  // One-byte string.
  __ lbzx(result, MemOperand(string, index));
  __ bind(&done);
}


static MemOperand ExpConstant(int index, Register base) {
  return MemOperand(base, index * kDoubleSize);
}

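// Emits a fast approximation of exp(input) into |result|. The input is first
// range-checked against the limits in the math_exp_constants table (the
// underflow case branches to |zero|, the overflow case to |infinity|, which
// loads constant slot 2); in-range values combine a short polynomial with an
// entry from math_exp_log_table and a constructed power-of-two exponent. The
// constant layout is set up by ExternalReference::InitializeMathExpData(),
// which must have run before this code executes.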
void MathExpGenerator::EmitMathExp(MacroAssembler* masm, DoubleRegister input,
                                   DoubleRegister result,
                                   DoubleRegister double_scratch1,
                                   DoubleRegister double_scratch2,
                                   Register temp1, Register temp2,
                                   Register temp3) {
  DCHECK(!input.is(result));
  DCHECK(!input.is(double_scratch1));
  DCHECK(!input.is(double_scratch2));
  DCHECK(!result.is(double_scratch1));
  DCHECK(!result.is(double_scratch2));
  DCHECK(!double_scratch1.is(double_scratch2));
  DCHECK(!temp1.is(temp2));
  DCHECK(!temp1.is(temp3));
  DCHECK(!temp2.is(temp3));
  DCHECK(ExternalReference::math_exp_constants(0).address() != NULL);
  DCHECK(!masm->serializer_enabled());  // External references not serializable.

  Label zero, infinity, done;

  __ mov(temp3, Operand(ExternalReference::math_exp_constants(0)));

  __ lfd(double_scratch1, ExpConstant(0, temp3));
  __ fcmpu(double_scratch1, input);
  __ fmr(result, input);
  __ bunordered(&done);
  __ bge(&zero);

  __ lfd(double_scratch2, ExpConstant(1, temp3));
  __ fcmpu(input, double_scratch2);
  __ bge(&infinity);

  __ lfd(double_scratch1, ExpConstant(3, temp3));
  __ lfd(result, ExpConstant(4, temp3));
  __ fmul(double_scratch1, double_scratch1, input);
  __ fadd(double_scratch1, double_scratch1, result);
  __ MovDoubleLowToInt(temp2, double_scratch1);
  __ fsub(double_scratch1, double_scratch1, result);
  __ lfd(result, ExpConstant(6, temp3));
  __ lfd(double_scratch2, ExpConstant(5, temp3));
  __ fmul(double_scratch1, double_scratch1, double_scratch2);
  __ fsub(double_scratch1, double_scratch1, input);
  __ fsub(result, result, double_scratch1);
  __ fmul(double_scratch2, double_scratch1, double_scratch1);
  __ fmul(result, result, double_scratch2);
  __ lfd(double_scratch2, ExpConstant(7, temp3));
  __ fmul(result, result, double_scratch2);
  __ fsub(result, result, double_scratch1);
  __ lfd(double_scratch2, ExpConstant(8, temp3));
  __ fadd(result, result, double_scratch2);
  __ srwi(temp1, temp2, Operand(11));
  __ andi(temp2, temp2, Operand(0x7ff));
  __ addi(temp1, temp1, Operand(0x3ff));

  // Must not call ExpConstant() after overwriting temp3!
  __ mov(temp3, Operand(ExternalReference::math_exp_log_table()));
  __ slwi(temp2, temp2, Operand(3));
#if V8_TARGET_ARCH_PPC64
  __ ldx(temp2, MemOperand(temp3, temp2));
  __ sldi(temp1, temp1, Operand(52));
  __ orx(temp2, temp1, temp2);
  __ MovInt64ToDouble(double_scratch1, temp2);
#else
  __ add(ip, temp3, temp2);
  __ lwz(temp3, MemOperand(ip, Register::kExponentOffset));
  __ lwz(temp2, MemOperand(ip, Register::kMantissaOffset));
  __ slwi(temp1, temp1, Operand(20));
  __ orx(temp3, temp1, temp3);
  __ MovInt64ToDouble(double_scratch1, temp3, temp2);
#endif

  __ fmul(result, result, double_scratch1);
  __ b(&done);

  __ bind(&zero);
  __ fmr(result, kDoubleRegZero);
  __ b(&done);

  __ bind(&infinity);
  __ lfd(result, ExpConstant(2, temp3));

  __ bind(&done);
}

#undef __

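// Code aging support. The "young" code sequence is the standard function
// prologue (PushFixedFrame plus frame-pointer setup, padded with nops);
// PatchPlatformCodeAge below replaces that prologue with a jump into a
// code-age stub when the code is aged.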
CodeAgingHelper::CodeAgingHelper(Isolate* isolate) {
  USE(isolate);
  DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
  // Since patcher is a large object, allocate it dynamically when needed,
  // to avoid overloading the stack in stress conditions.
  // DONT_FLUSH is used because the CodeAgingHelper is initialized early in
  // the process, before the simulator ICache is set up.
  base::SmartPointer<CodePatcher> patcher(
      new CodePatcher(isolate, young_sequence_.start(),
                      young_sequence_.length() / Assembler::kInstrSize,
                      CodePatcher::DONT_FLUSH));
  PredictableCodeSizeScope scope(patcher->masm(), young_sequence_.length());
  patcher->masm()->PushFixedFrame(r4);
  patcher->masm()->addi(fp, sp,
                        Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
  for (int i = 0; i < kNoCodeAgeSequenceNops; i++) {
    patcher->masm()->nop();
  }
}


#ifdef DEBUG
bool CodeAgingHelper::IsOld(byte* candidate) const {
  return Assembler::IsNop(Assembler::instr_at(candidate));
}
#endif


bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
  bool result = isolate->code_aging_helper()->IsYoung(sequence);
  DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
  return result;
}


void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age,
                               MarkingParity* parity) {
  if (IsYoungSequence(isolate, sequence)) {
    *age = kNoAgeCodeAge;
    *parity = NO_MARKING_PARITY;
  } else {
    Code* code = NULL;
    Address target_address =
        Assembler::target_address_at(sequence + kCodeAgingTargetDelta, code);
    Code* stub = GetCodeFromTargetAddress(target_address);
    GetCodeAgeAndParity(stub, age, parity);
  }
}

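// Rewrites the prologue at |sequence|: making code young again copies the
// original young sequence back, while aging patches in a marker nop followed
// by an indirect jump to the matching code-age stub (FIXED_SEQUENCE).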
void Code::PatchPlatformCodeAge(Isolate* isolate, byte* sequence, Code::Age age,
                                MarkingParity parity) {
  uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
  if (age == kNoAgeCodeAge) {
    isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
    Assembler::FlushICache(isolate, sequence, young_length);
  } else {
    // FIXED_SEQUENCE
    Code* stub = GetCodeAgeStub(isolate, age, parity);
    CodePatcher patcher(isolate, sequence,
                        young_length / Assembler::kInstrSize);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(patcher.masm());
    intptr_t target = reinterpret_cast<intptr_t>(stub->instruction_start());
    // Don't use Call -- we need to preserve ip and lr.
    // See GenerateMakeCodeYoungAgainCommon for the stub code.
    patcher.masm()->nop();  // marker to detect sequence (see IsOld)
    patcher.masm()->mov(r3, Operand(target));
    patcher.masm()->Jump(r3);
    for (int i = 0; i < kCodeAgingSequenceNops; i++) {
      patcher.masm()->nop();
    }
  }
}
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC