// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005#include "src/x64/codegen-x64.h"
Steve Blocka7e24c12009-10-30 11:49:00 +00006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#if V8_TARGET_ARCH_X64
Leon Clarkef7060e22010-06-03 12:02:55 +01008
Ben Murdochb8a8cc12014-11-26 15:28:44 +00009#include "src/codegen.h"
10#include "src/macro-assembler.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000011
12namespace v8 {
13namespace internal {
14
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010015// -------------------------------------------------------------------------
16// Platform-specific RuntimeCallHelper functions.
17
// Sets up an INTERNAL stack frame before a stub performs a runtime call.
// The has_frame flag on the MacroAssembler is tracked manually here: the
// DCHECK after EnterFrame shows EnterFrame itself does not set the flag.
void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterFrame(StackFrame::INTERNAL);
  DCHECK(!masm->has_frame());
  masm->set_has_frame(true);
}
23
24
// Tears down the INTERNAL frame set up by BeforeCall and clears the
// manually-tracked has_frame flag. Must mirror BeforeCall exactly.
void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveFrame(StackFrame::INTERNAL);
  DCHECK(masm->has_frame());
  masm->set_has_frame(false);
}
30
31
Steve Block3ce2e202009-11-05 08:53:23 +000032#define __ masm.
33
Ben Murdoch3ef787d2012-04-12 10:51:47 +010034
// Generates a free-standing machine-code stub computing exp(x) for a double
// in xmm0, returning the result in xmm0. Returns nullptr if the executable
// buffer cannot be allocated. The returned pointer is a raw code buffer that
// is never freed (it is made read/execute-only via ProtectCode).
UnaryMathFunctionWithIsolate CreateExpFunction(Isolate* isolate) {
  size_t actual_size;
  // Allocate a small executable buffer; 1 KB is ample for this stub.
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;
  // Ensure the constant/log tables used by EmitMathExp exist.
  ExternalReference::InitializeMathExpData();

  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);
  // xmm0: raw double input.
  XMMRegister input = xmm0;
  XMMRegister result = xmm1;
  // EmitMathExp clobbers rax/rbx (passed as temps), so preserve them.
  __ pushq(rax);
  __ pushq(rbx);

  MathExpGenerator::EmitMathExp(&masm, input, result, xmm2, rax, rbx);

  __ popq(rbx);
  __ popq(rax);
  __ Movsd(xmm0, result);
  __ Ret();

  CodeDesc desc;
  masm.GetCode(&desc);
  // The stub must be position-independent: no relocation entries allowed.
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
}
65
66
// Generates a free-standing machine-code stub computing sqrt(x) for a double
// in xmm0, returning the result in xmm0. Returns nullptr if the executable
// buffer cannot be allocated.
UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
  size_t actual_size;
  // Allocate buffer in executable space.
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;

  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);
  // xmm0: raw double input.
  // Move double input into registers.
  __ Sqrtsd(xmm0, xmm0);
  __ Ret();

  CodeDesc desc;
  masm.GetCode(&desc);
  // The stub must be position-independent: no relocation entries allowed.
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
}
89
Ben Murdoch3ef787d2012-04-12 10:51:47 +010090#undef __
91
92// -------------------------------------------------------------------------
93// Code generators
94
95#define __ ACCESS_MASM(masm)
96
// Emits code for an elements-kind transition that only requires swapping the
// receiver's map (no backing-store conversion). If allocation-site tracking
// is on, jumps to |allocation_memento_found| when the receiver (a JSArray)
// has an allocation memento.
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* allocation_memento_found) {
  // Return address is on the stack.
  Register scratch = rdi;
  DCHECK(!AreAliased(receiver, key, value, target_map, scratch));

  if (mode == TRACK_ALLOCATION_SITE) {
    DCHECK(allocation_memento_found != NULL);
    __ JumpIfJSArrayHasAllocationMemento(
        receiver, scratch, allocation_memento_found);
  }

  // Set transitioned map, with a write barrier for the map field. The smi
  // check is omitted because the receiver is known to be a heap object.
  __ movp(FieldOperand(receiver, HeapObject::kMapOffset), target_map);
  __ RecordWriteField(receiver,
                      HeapObject::kMapOffset,
                      target_map,
                      scratch,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
125
126
// Emits code transitioning a FAST_SMI elements backing store (FixedArray of
// smis) to FAST_DOUBLE (FixedDoubleArray), converting each smi to a raw
// double and each hole to the hole NaN. Reuses the old backing store in
// place when it is in new space and smi/double slots are the same size;
// otherwise allocates a fresh FixedDoubleArray (jumping to |fail| on
// allocation failure). Fixed register assignment: rdx=receiver, rcx=key,
// rax=value, rbx=target_map; r8/r9/r11/r14/r15/rdi/xmm0 are clobbered.
void ElementsTransitionGenerator::GenerateSmiToDouble(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Return address is on the stack.
  DCHECK(receiver.is(rdx));
  DCHECK(key.is(rcx));
  DCHECK(value.is(rax));
  DCHECK(target_map.is(rbx));

  // The fail label is not actually used since we do not allocate.
  Label allocated, new_backing_store, only_change_map, done;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(rdx, rdi, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ movp(r8, FieldOperand(rdx, JSObject::kElementsOffset));
  __ CompareRoot(r8, Heap::kEmptyFixedArrayRootIndex);
  __ j(equal, &only_change_map);

  __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset));
  if (kPointerSize == kDoubleSize) {
    // Check backing store for COW-ness. For COW arrays we have to
    // allocate a new backing store.
    __ CompareRoot(FieldOperand(r8, HeapObject::kMapOffset),
                   Heap::kFixedCOWArrayMapRootIndex);
    __ j(equal, &new_backing_store);
  } else {
    // For x32 port we have to allocate a new backing store as SMI size is
    // not equal with double size.
    DCHECK(kDoubleSize == 2 * kPointerSize);
    __ jmp(&new_backing_store);
  }

  // Check if the backing store is in new-space. If not, we need to allocate
  // a new one since the old one is in pointer-space.
  // If in new space, we can reuse the old backing store because it is
  // the same size.
  __ JumpIfNotInNewSpace(r8, rdi, &new_backing_store);

  __ movp(r14, r8);  // Destination array equals source array.

  // r8 : source FixedArray
  // r9 : elements array length
  // r14: destination FixedDoubleArray
  // Set backing store's map
  __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ movp(FieldOperand(r14, HeapObject::kMapOffset), rdi);

  __ bind(&allocated);
  // Set transitioned map.
  __ movp(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
  __ RecordWriteField(rdx,
                      HeapObject::kMapOffset,
                      rbx,
                      rdi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Convert smis to doubles and holes to hole NaNs. The Array's length
  // remains unchanged.
  STATIC_ASSERT(FixedDoubleArray::kLengthOffset == FixedArray::kLengthOffset);
  STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);

  Label loop, entry, convert_hole;
  __ movq(r15, bit_cast<int64_t, uint64_t>(kHoleNanInt64));
  // r15: the-hole NaN
  __ jmp(&entry);

  // Allocate new backing store.
  __ bind(&new_backing_store);
  __ leap(rdi, Operand(r9, times_8, FixedArray::kHeaderSize));
  __ Allocate(rdi, r14, r11, r15, fail, TAG_OBJECT);
  // Set backing store's map
  __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ movp(FieldOperand(r14, HeapObject::kMapOffset), rdi);
  // Set receiver's backing store.
  __ movp(FieldOperand(rdx, JSObject::kElementsOffset), r14);
  __ movp(r11, r14);
  __ RecordWriteField(rdx,
                      JSObject::kElementsOffset,
                      r11,
                      r15,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // Set backing store's length.
  __ Integer32ToSmi(r11, r9);
  __ movp(FieldOperand(r14, FixedDoubleArray::kLengthOffset), r11);
  __ jmp(&allocated);

  __ bind(&only_change_map);
  // Set transitioned map. The store can omit the remembered-set entry since
  // the elements pointer itself is unchanged.
  __ movp(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
  __ RecordWriteField(rdx,
                      HeapObject::kMapOffset,
                      rbx,
                      rdi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ jmp(&done);

  // Conversion loop, iterating r9 from length-1 down to 0.
  __ bind(&loop);
  __ movp(rbx,
          FieldOperand(r8, r9, times_pointer_size, FixedArray::kHeaderSize));
  // r9 : current element's index
  // rbx: current element (smi-tagged)
  __ JumpIfNotSmi(rbx, &convert_hole);
  __ SmiToInteger32(rbx, rbx);
  __ Cvtlsi2sd(xmm0, rbx);
  __ Movsd(FieldOperand(r14, r9, times_8, FixedDoubleArray::kHeaderSize), xmm0);
  __ jmp(&entry);
  __ bind(&convert_hole);

  if (FLAG_debug_code) {
    // Only the hole may appear as a non-smi in a smi-only array.
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ Assert(equal, kObjectFoundInSmiOnlyArray);
  }

  __ movq(FieldOperand(r14, r9, times_8, FixedDoubleArray::kHeaderSize), r15);
  __ bind(&entry);
  __ decp(r9);
  __ j(not_sign, &loop);

  __ bind(&done);
}
263
264
// Emits code transitioning a FAST_DOUBLE backing store (FixedDoubleArray) to
// FAST elements (FixedArray of tagged values), boxing each raw double into a
// fresh HeapNumber and replacing each hole NaN with the hole sentinel.
// Allocation can fail, in which case the original state is restored and
// control jumps to |fail|. Fixed register assignment: rdx=receiver, rcx=key,
// rax=value, rbx=target_map; rsi and rax are saved/restored around the loop
// because they are used as the hole-NaN constant and HeapNumber result.
void ElementsTransitionGenerator::GenerateDoubleToObject(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Return address is on the stack.
  DCHECK(receiver.is(rdx));
  DCHECK(key.is(rcx));
  DCHECK(value.is(rax));
  DCHECK(target_map.is(rbx));

  Label loop, entry, convert_hole, gc_required, only_change_map;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(rdx, rdi, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ movp(r8, FieldOperand(rdx, JSObject::kElementsOffset));
  __ CompareRoot(r8, Heap::kEmptyFixedArrayRootIndex);
  __ j(equal, &only_change_map);

  // Save rsi (context) and rax (value); both are clobbered below.
  __ Push(rsi);
  __ Push(rax);

  __ movp(r8, FieldOperand(rdx, JSObject::kElementsOffset));
  __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset));
  // r8 : source FixedDoubleArray
  // r9 : number of elements
  __ leap(rdi, Operand(r9, times_pointer_size, FixedArray::kHeaderSize));
  __ Allocate(rdi, r11, r14, r15, &gc_required, TAG_OBJECT);
  // r11: destination FixedArray
  __ LoadRoot(rdi, Heap::kFixedArrayMapRootIndex);
  __ movp(FieldOperand(r11, HeapObject::kMapOffset), rdi);
  __ Integer32ToSmi(r14, r9);
  __ movp(FieldOperand(r11, FixedArray::kLengthOffset), r14);

  // Prepare for conversion loop.
  __ movq(rsi, bit_cast<int64_t, uint64_t>(kHoleNanInt64));
  __ LoadRoot(rdi, Heap::kTheHoleValueRootIndex);
  // rsi: the-hole NaN
  // rdi: pointer to the-hole

  // Allocating heap numbers in the loop below can fail and cause a jump to
  // gc_required. We can't leave a partly initialized FixedArray behind,
  // so pessimistically fill it with holes now.
  Label initialization_loop, initialization_loop_entry;
  __ jmp(&initialization_loop_entry, Label::kNear);
  __ bind(&initialization_loop);
  __ movp(FieldOperand(r11, r9, times_pointer_size, FixedArray::kHeaderSize),
          rdi);
  __ bind(&initialization_loop_entry);
  __ decp(r9);
  __ j(not_sign, &initialization_loop);

  // Reload the element count; r9 was consumed by the fill loop.
  __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset));
  __ jmp(&entry);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  __ Pop(rax);
  __ Pop(rsi);
  __ jmp(fail);

  // Box doubles into heap numbers.
  __ bind(&loop);
  __ movq(r14, FieldOperand(r8,
                            r9,
                            times_8,
                            FixedDoubleArray::kHeaderSize));
  // r9 : current element's index
  // r14: current element
  __ cmpq(r14, rsi);
  __ j(equal, &convert_hole);

  // Non-hole double, copy value into a heap number.
  __ AllocateHeapNumber(rax, r15, &gc_required);
  // rax: new heap number
  __ movq(FieldOperand(rax, HeapNumber::kValueOffset), r14);
  __ movp(FieldOperand(r11,
                       r9,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          rax);
  __ movp(r15, r9);
  __ RecordWriteArray(r11,
                      rax,
                      r15,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ jmp(&entry, Label::kNear);

  // Replace the-hole NaN with the-hole pointer.
  __ bind(&convert_hole);
  __ movp(FieldOperand(r11,
                       r9,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          rdi);

  __ bind(&entry);
  __ decp(r9);
  __ j(not_sign, &loop);

  // Replace receiver's backing store with newly created and filled FixedArray.
  __ movp(FieldOperand(rdx, JSObject::kElementsOffset), r11);
  __ RecordWriteField(rdx,
                      JSObject::kElementsOffset,
                      r11,
                      r15,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ Pop(rax);
  __ Pop(rsi);

  __ bind(&only_change_map);
  // Set transitioned map.
  __ movp(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
  __ RecordWriteField(rdx,
                      HeapObject::kMapOffset,
                      rbx,
                      rdi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
397
398
// Emits code loading the character code at |index| of |string| into |result|.
// Unwraps indirect strings (slices and flat cons strings) first, then
// dispatches on sequential vs. external representation and on one-byte vs.
// two-byte encoding. Jumps to |call_runtime| for cases that cannot be
// handled inline (non-flat cons strings, short external strings).
// Clobbers |string| and |index|.
void StringCharLoadGenerator::Generate(MacroAssembler* masm,
                                       Register string,
                                       Register index,
                                       Register result,
                                       Label* call_runtime) {
  // Fetch the instance type of the receiver into result register.
  __ movp(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ testb(result, Immediate(kIsIndirectStringMask));
  __ j(zero, &check_sequential, Label::kNear);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  __ testb(result, Immediate(kSlicedNotConsMask));
  __ j(zero, &cons_string, Label::kNear);

  // Handle slices: add the slice offset to the index and continue with the
  // parent string.
  Label indirect_string_loaded;
  __ SmiToInteger32(result, FieldOperand(string, SlicedString::kOffsetOffset));
  __ addp(index, result);
  __ movp(string, FieldOperand(string, SlicedString::kParentOffset));
  __ jmp(&indirect_string_loaded, Label::kNear);

  // Handle cons strings.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ CompareRoot(FieldOperand(string, ConsString::kSecondOffset),
                 Heap::kempty_stringRootIndex);
  __ j(not_equal, call_runtime);
  __ movp(string, FieldOperand(string, ConsString::kFirstOffset));

  __ bind(&indirect_string_loaded);
  // Re-fetch the instance type of the (possibly new) underlying string.
  __ movp(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // Distinguish sequential and external strings. Only these two string
  // representations can reach here (slices and flat cons strings have been
  // reduced to the underlying sequential or external string).
  Label seq_string;
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ testb(result, Immediate(kStringRepresentationMask));
  __ j(zero, &seq_string, Label::kNear);

  // Handle external strings.
  Label one_byte_external, done;
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ testb(result, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ testb(result, Immediate(kShortExternalStringTag));
  __ j(not_zero, call_runtime);
  // Check encoding. The movp below does not affect flags, so the branch
  // after it still tests the kStringEncodingMask result.
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ testb(result, Immediate(kStringEncodingMask));
  __ movp(result, FieldOperand(string, ExternalString::kResourceDataOffset));
  __ j(not_equal, &one_byte_external, Label::kNear);
  // Two-byte string.
  __ movzxwl(result, Operand(result, index, times_2, 0));
  __ jmp(&done, Label::kNear);
  __ bind(&one_byte_external);
  // One-byte string.
  __ movzxbl(result, Operand(result, index, times_1, 0));
  __ jmp(&done, Label::kNear);

  // Dispatch on the encoding: one-byte or two-byte.
  Label one_byte;
  __ bind(&seq_string);
  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
  STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
  __ testb(result, Immediate(kStringEncodingMask));
  __ j(not_zero, &one_byte, Label::kNear);

  // Two-byte string.
  // Load the two-byte character code into the result register.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ movzxwl(result, FieldOperand(string,
                                  index,
                                  times_2,
                                  SeqTwoByteString::kHeaderSize));
  __ jmp(&done, Label::kNear);

  // One-byte string.
  // Load the byte into the result register.
  __ bind(&one_byte);
  __ movzxbl(result, FieldOperand(string,
                                  index,
                                  times_1,
                                  SeqOneByteString::kHeaderSize));
  __ bind(&done);
}
500
501
// Emits code computing exp(input) into |result| using the table-driven
// approximation whose constants are published via
// ExternalReference::math_exp_constants / math_exp_log_table.
// |input| and |double_scratch| are clobbered; kScratchRegister is used for
// table addresses. The exact meaning of each constant slot (0*kDoubleSize
// .. 8*kDoubleSize) is defined where the tables are initialized — the
// instruction order here must match that layout exactly.
void MathExpGenerator::EmitMathExp(MacroAssembler* masm,
                                   XMMRegister input,
                                   XMMRegister result,
                                   XMMRegister double_scratch,
                                   Register temp1,
                                   Register temp2) {
  DCHECK(!input.is(result));
  DCHECK(!input.is(double_scratch));
  DCHECK(!result.is(double_scratch));
  DCHECK(!temp1.is(temp2));
  DCHECK(ExternalReference::math_exp_constants(0).address() != NULL);
  DCHECK(!masm->serializer_enabled());  // External references not serializable.

  Label done;

  __ Move(kScratchRegister, ExternalReference::math_exp_constants(0));
  __ Movsd(double_scratch, Operand(kScratchRegister, 0 * kDoubleSize));
  // Below the lower bound the answer is 0 (result was xor-cleared).
  __ Xorpd(result, result);
  __ Ucomisd(double_scratch, input);
  __ j(above_equal, &done);
  // Above the upper bound the answer is the constant in slot 2
  // (presumably +infinity — defined with the table; confirm there).
  __ Ucomisd(input, Operand(kScratchRegister, 1 * kDoubleSize));
  __ Movsd(result, Operand(kScratchRegister, 2 * kDoubleSize));
  __ j(above_equal, &done);
  __ Movsd(double_scratch, Operand(kScratchRegister, 3 * kDoubleSize));
  __ Movsd(result, Operand(kScratchRegister, 4 * kDoubleSize));
  __ Mulsd(double_scratch, input);
  __ Addsd(double_scratch, result);
  __ Movq(temp2, double_scratch);
  __ Subsd(double_scratch, result);
  __ Movsd(result, Operand(kScratchRegister, 6 * kDoubleSize));
  __ leaq(temp1, Operand(temp2, 0x1ff800));
  __ andq(temp2, Immediate(0x7ff));
  __ shrq(temp1, Immediate(11));
  __ Mulsd(double_scratch, Operand(kScratchRegister, 5 * kDoubleSize));
  __ Move(kScratchRegister, ExternalReference::math_exp_log_table());
  // Assemble 2^k directly in the IEEE-754 exponent field (bit 52 onward)
  // and combine with the table entry selected by the low 11 bits.
  __ shlq(temp1, Immediate(52));
  __ orq(temp1, Operand(kScratchRegister, temp2, times_8, 0));
  __ Move(kScratchRegister, ExternalReference::math_exp_constants(0));
  __ Subsd(double_scratch, input);
  __ Movsd(input, double_scratch);
  __ Subsd(result, double_scratch);
  __ Mulsd(input, double_scratch);
  __ Mulsd(result, input);
  __ Movq(input, temp1);
  __ Mulsd(result, Operand(kScratchRegister, 7 * kDoubleSize));
  __ Subsd(result, double_scratch);
  __ Addsd(result, Operand(kScratchRegister, 8 * kDoubleSize));
  __ Mulsd(result, input);

  __ bind(&done);
}
Leon Clarkee46be812010-01-19 14:06:41 +0000553
Steve Blocka7e24c12009-10-30 11:49:00 +0000554#undef __
555
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000556
// Pre-assembles the "young" code-age sequence into young_sequence_. This is
// the exact byte sequence that IsYoung/CopyYoungSequenceTo compare/copy, so
// the instructions here must match the prologue emitted for new code.
CodeAgingHelper::CodeAgingHelper(Isolate* isolate) {
  USE(isolate);
  DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
  // The sequence of instructions that is patched out for aging code is the
  // following boilerplate stack-building prologue that is found both in
  // FUNCTION and OPTIMIZED_FUNCTION code:
  CodePatcher patcher(isolate, young_sequence_.start(),
                      young_sequence_.length());
  patcher.masm()->pushq(rbp);
  patcher.masm()->movp(rbp, rsp);
  patcher.masm()->Push(rsi);
  patcher.masm()->Push(rdi);
}
570
571
#ifdef DEBUG
// A sequence is "old" when its first byte is the call opcode planted by
// PatchPlatformCodeAge (which replaces the young prologue with a call).
bool CodeAgingHelper::IsOld(byte* candidate) const {
  return *candidate == kCallOpcode;
}
#endif
577
578
// Returns true if |sequence| still carries the unpatched young prologue.
// In debug builds, also checks that a non-young sequence really looks old
// (starts with the patched call opcode).
bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
  bool result = isolate->code_aging_helper()->IsYoung(sequence);
  DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
  return result;
}
584
585
// Decodes the age and marking parity of a code object from its prologue.
// Young sequences report kNoAgeCodeAge; aged sequences are a call to an
// age-specific stub, so the stub is recovered from the rel32 call target
// and asked for its age/parity.
void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age,
                               MarkingParity* parity) {
  if (IsYoungSequence(isolate, sequence)) {
    *age = kNoAgeCodeAge;
    *parity = NO_MARKING_PARITY;
  } else {
    sequence++;  // Skip the kCallOpcode byte
    // The 32-bit displacement after the opcode is relative to the end of
    // the call instruction (kCallTargetAddressOffset past the displacement).
    Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
        Assembler::kCallTargetAddressOffset;
    Code* stub = GetCodeFromTargetAddress(target_address);
    GetCodeAgeAndParity(stub, age, parity);
  }
}
599
600
// Rewrites the code-age prologue at |sequence|. Rejuvenating (kNoAgeCodeAge)
// restores the saved young sequence; aging replaces it with a short call to
// the matching age stub, padded with nops to keep the sequence length fixed.
void Code::PatchPlatformCodeAge(Isolate* isolate,
                                byte* sequence,
                                Code::Age age,
                                MarkingParity parity) {
  uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
  if (age == kNoAgeCodeAge) {
    isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
    Assembler::FlushICache(isolate, sequence, young_length);
  } else {
    Code* stub = GetCodeAgeStub(isolate, age, parity);
    // CodePatcher flushes the icache on destruction, so no explicit flush
    // is needed on this path.
    CodePatcher patcher(isolate, sequence, young_length);
    patcher.masm()->call(stub->instruction_start());
    patcher.masm()->Nop(
        kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength);
  }
}
617
618
619Operand StackArgumentsAccessor::GetArgumentOperand(int index) {
620 DCHECK(index >= 0);
621 int receiver = (receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER) ? 1 : 0;
622 int displacement_to_last_argument = base_reg_.is(rsp) ?
623 kPCOnStackSize : kFPOnStackSize + kPCOnStackSize;
624 displacement_to_last_argument += extra_displacement_to_last_argument_;
625 if (argument_count_reg_.is(no_reg)) {
626 // argument[0] is at base_reg_ + displacement_to_last_argument +
627 // (argument_count_immediate_ + receiver - 1) * kPointerSize.
628 DCHECK(argument_count_immediate_ + receiver > 0);
629 return Operand(base_reg_, displacement_to_last_argument +
630 (argument_count_immediate_ + receiver - 1 - index) * kPointerSize);
631 } else {
632 // argument[0] is at base_reg_ + displacement_to_last_argument +
633 // argument_count_reg_ * times_pointer_size + (receiver - 1) * kPointerSize.
634 return Operand(base_reg_, argument_count_reg_, times_pointer_size,
635 displacement_to_last_argument + (receiver - 1 - index) * kPointerSize);
636 }
637}
638
639
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000640} // namespace internal
641} // namespace v8
Leon Clarkef7060e22010-06-03 12:02:55 +0100642
643#endif // V8_TARGET_ARCH_X64