blob: 911f3cb64aa10d229629b8611a8a57974ffd79d8 [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Steve Blocka7e24c12009-10-30 11:49:00 +00004
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005#include "src/x64/codegen-x64.h"
Steve Blocka7e24c12009-10-30 11:49:00 +00006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#if V8_TARGET_ARCH_X64
Leon Clarkef7060e22010-06-03 12:02:55 +01008
Ben Murdochb8a8cc12014-11-26 15:28:44 +00009#include "src/codegen.h"
10#include "src/macro-assembler.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000011
12namespace v8 {
13namespace internal {
14
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010015// -------------------------------------------------------------------------
16// Platform-specific RuntimeCallHelper functions.
17
Ben Murdochb0fe1622011-05-05 13:52:32 +010018void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
Ben Murdoch3ef787d2012-04-12 10:51:47 +010019 masm->EnterFrame(StackFrame::INTERNAL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000020 DCHECK(!masm->has_frame());
Ben Murdoch3ef787d2012-04-12 10:51:47 +010021 masm->set_has_frame(true);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010022}
23
24
Ben Murdochb0fe1622011-05-05 13:52:32 +010025void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
Ben Murdoch3ef787d2012-04-12 10:51:47 +010026 masm->LeaveFrame(StackFrame::INTERNAL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000027 DCHECK(masm->has_frame());
Ben Murdoch3ef787d2012-04-12 10:51:47 +010028 masm->set_has_frame(false);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010029}
30
31
Steve Block3ce2e202009-11-05 08:53:23 +000032#define __ masm.
33
Ben Murdoch3ef787d2012-04-12 10:51:47 +010034
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000035UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +010036 size_t actual_size;
37 // Allocate buffer in executable space.
Ben Murdochb8a8cc12014-11-26 15:28:44 +000038 byte* buffer =
39 static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000040 if (buffer == nullptr) return nullptr;
Ben Murdoch3ef787d2012-04-12 10:51:47 +010041
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000042 MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
43 CodeObjectRequired::kNo);
Ben Murdoch3ef787d2012-04-12 10:51:47 +010044 // xmm0: raw double input.
45 // Move double input into registers.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000046 __ Sqrtsd(xmm0, xmm0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +010047 __ Ret();
48
49 CodeDesc desc;
50 masm.GetCode(&desc);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000051 DCHECK(!RelocInfo::RequiresRelocation(desc));
Ben Murdoch3ef787d2012-04-12 10:51:47 +010052
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000053 Assembler::FlushICache(isolate, buffer, actual_size);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000054 base::OS::ProtectCode(buffer, actual_size);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000055 return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
Ben Murdoch3ef787d2012-04-12 10:51:47 +010056}
57
Ben Murdoch3ef787d2012-04-12 10:51:47 +010058#undef __
59
60// -------------------------------------------------------------------------
61// Code generators
62
63#define __ ACCESS_MASM(masm)
64
Ben Murdochb8a8cc12014-11-26 15:28:44 +000065void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
66 MacroAssembler* masm,
67 Register receiver,
68 Register key,
69 Register value,
70 Register target_map,
71 AllocationSiteMode mode,
72 Label* allocation_memento_found) {
73 // Return address is on the stack.
74 Register scratch = rdi;
75 DCHECK(!AreAliased(receiver, key, value, target_map, scratch));
76
77 if (mode == TRACK_ALLOCATION_SITE) {
78 DCHECK(allocation_memento_found != NULL);
79 __ JumpIfJSArrayHasAllocationMemento(
80 receiver, scratch, allocation_memento_found);
81 }
82
Ben Murdoch3ef787d2012-04-12 10:51:47 +010083 // Set transitioned map.
Ben Murdochb8a8cc12014-11-26 15:28:44 +000084 __ movp(FieldOperand(receiver, HeapObject::kMapOffset), target_map);
85 __ RecordWriteField(receiver,
Ben Murdoch3ef787d2012-04-12 10:51:47 +010086 HeapObject::kMapOffset,
Ben Murdochb8a8cc12014-11-26 15:28:44 +000087 target_map,
88 scratch,
Ben Murdoch3ef787d2012-04-12 10:51:47 +010089 kDontSaveFPRegs,
90 EMIT_REMEMBERED_SET,
91 OMIT_SMI_CHECK);
92}
93
94
// Emits code that transitions a JSObject's elements from smi to double
// representation: installs |target_map| on the receiver and converts the
// FixedArray backing store into a FixedDoubleArray, turning each smi into
// a double and each hole into the hole NaN bit pattern.
//
// Fixed register contract (asserted below): receiver=rdx, key=rcx,
// value=rax, target_map=rbx.  The return address is on the stack.
// Jumps to |fail| if an allocation memento is found (when tracking
// allocation sites) or if allocating a new backing store fails.
void ElementsTransitionGenerator::GenerateSmiToDouble(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Return address is on the stack.
  DCHECK(receiver.is(rdx));
  DCHECK(key.is(rcx));
  DCHECK(value.is(rax));
  DCHECK(target_map.is(rbx));

  // NOTE(review): |fail| IS used below — by the allocation-memento check
  // and by the Allocate() in new_backing_store (the earlier comment
  // claiming it was unused was stale).
  Label allocated, new_backing_store, only_change_map, done;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(rdx, rdi, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ movp(r8, FieldOperand(rdx, JSObject::kElementsOffset));
  __ CompareRoot(r8, Heap::kEmptyFixedArrayRootIndex);
  __ j(equal, &only_change_map);

  // r9 <- number of elements (untagged).
  __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset));
  if (kPointerSize == kDoubleSize) {
    // Check backing store for COW-ness. For COW arrays we have to
    // allocate a new backing store.
    __ CompareRoot(FieldOperand(r8, HeapObject::kMapOffset),
                   Heap::kFixedCOWArrayMapRootIndex);
    __ j(equal, &new_backing_store);
  } else {
    // For x32 port we have to allocate a new backing store as SMI size is
    // not equal with double size.
    DCHECK(kDoubleSize == 2 * kPointerSize);
    __ jmp(&new_backing_store);
  }

  // Check if the backing store is in new-space. If not, we need to allocate
  // a new one since the old one is in pointer-space.
  // If in new space, we can reuse the old backing store because it is
  // the same size.
  __ JumpIfNotInNewSpace(r8, rdi, &new_backing_store);

  __ movp(r14, r8);  // Destination array equals source array.

  // r8 : source FixedArray
  // r9 : elements array length
  // r14: destination FixedDoubleArray
  // Set backing store's map
  __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ movp(FieldOperand(r14, HeapObject::kMapOffset), rdi);

  // Joined from both the reuse path above and the fresh-allocation path
  // below; r14 holds the (possibly new) FixedDoubleArray either way.
  __ bind(&allocated);
  // Set transitioned map.
  __ movp(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
  __ RecordWriteField(rdx,
                      HeapObject::kMapOffset,
                      rbx,
                      rdi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Convert smis to doubles and holes to hole NaNs. The Array's length
  // remains unchanged.
  STATIC_ASSERT(FixedDoubleArray::kLengthOffset == FixedArray::kLengthOffset);
  STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);

  Label loop, entry, convert_hole;
  __ movq(r15, bit_cast<int64_t, uint64_t>(kHoleNanInt64));
  // r15: the-hole NaN
  __ jmp(&entry);

  // Allocate new backing store.
  __ bind(&new_backing_store);
  // rdi <- size in bytes: header + r9 elements of 8 bytes each.
  __ leap(rdi, Operand(r9, times_8, FixedArray::kHeaderSize));
  __ Allocate(rdi, r14, r11, r15, fail, NO_ALLOCATION_FLAGS);
  // Set backing store's map
  __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ movp(FieldOperand(r14, HeapObject::kMapOffset), rdi);
  // Set receiver's backing store.
  __ movp(FieldOperand(rdx, JSObject::kElementsOffset), r14);
  __ movp(r11, r14);
  __ RecordWriteField(rdx,
                      JSObject::kElementsOffset,
                      r11,
                      r15,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // Set backing store's length.
  __ Integer32ToSmi(r11, r9);
  __ movp(FieldOperand(r14, FixedDoubleArray::kLengthOffset), r11);
  __ jmp(&allocated);

  __ bind(&only_change_map);
  // Set transitioned map.  No remembered-set entry is needed here since
  // the backing store was not touched (OMIT_REMEMBERED_SET).
  __ movp(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
  __ RecordWriteField(rdx,
                      HeapObject::kMapOffset,
                      rbx,
                      rdi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ jmp(&done);

  // Conversion loop: walks r9 down from length-1 to 0.
  __ bind(&loop);
  __ movp(rbx,
          FieldOperand(r8, r9, times_pointer_size, FixedArray::kHeaderSize));
  // r9 : current element's index
  // rbx: current element (smi-tagged)
  __ JumpIfNotSmi(rbx, &convert_hole);
  __ SmiToInteger32(rbx, rbx);
  // Store the smi's value as a double in the destination slot.
  __ Cvtlsi2sd(kScratchDoubleReg, rbx);
  __ Movsd(FieldOperand(r14, r9, times_8, FixedDoubleArray::kHeaderSize),
           kScratchDoubleReg);
  __ jmp(&entry);
  __ bind(&convert_hole);

  if (FLAG_debug_code) {
    // A non-smi in a smi-only array must be the hole.
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ Assert(equal, kObjectFoundInSmiOnlyArray);
  }

  // Write the hole NaN bit pattern (r15) into the destination slot.
  __ movq(FieldOperand(r14, r9, times_8, FixedDoubleArray::kHeaderSize), r15);
  __ bind(&entry);
  __ decp(r9);
  __ j(not_sign, &loop);  // Loop until the index underflows past zero.

  __ bind(&done);
}
232
233
// Emits code that transitions a JSObject's elements from double to object
// (tagged) representation: allocates a new FixedArray, boxes each double
// into a freshly allocated HeapNumber (the hole NaN becomes the the-hole
// sentinel object), then installs the new backing store and |target_map|.
//
// Fixed register contract (asserted below): receiver=rdx, key=rcx,
// value=rax, target_map=rbx.  The return address is on the stack.
// Jumps to |fail| if an allocation memento is found or a GC is required.
void ElementsTransitionGenerator::GenerateDoubleToObject(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Return address is on the stack.
  DCHECK(receiver.is(rdx));
  DCHECK(key.is(rcx));
  DCHECK(value.is(rax));
  DCHECK(target_map.is(rbx));

  Label loop, entry, convert_hole, gc_required, only_change_map;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(rdx, rdi, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ movp(r8, FieldOperand(rdx, JSObject::kElementsOffset));
  __ CompareRoot(r8, Heap::kEmptyFixedArrayRootIndex);
  __ j(equal, &only_change_map);

  // Preserve rsi and rax across the conversion: rsi is clobbered below
  // with the hole NaN bit pattern and rax is used for boxed HeapNumbers.
  __ Push(rsi);
  __ Push(rax);

  __ movp(r8, FieldOperand(rdx, JSObject::kElementsOffset));
  __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset));
  // r8 : source FixedDoubleArray
  // r9 : number of elements
  // rdi <- size in bytes of the new FixedArray.
  __ leap(rdi, Operand(r9, times_pointer_size, FixedArray::kHeaderSize));
  __ Allocate(rdi, r11, r14, r15, &gc_required, NO_ALLOCATION_FLAGS);
  // r11: destination FixedArray
  __ LoadRoot(rdi, Heap::kFixedArrayMapRootIndex);
  __ movp(FieldOperand(r11, HeapObject::kMapOffset), rdi);
  __ Integer32ToSmi(r14, r9);
  __ movp(FieldOperand(r11, FixedArray::kLengthOffset), r14);

  // Prepare for conversion loop.
  __ movq(rsi, bit_cast<int64_t, uint64_t>(kHoleNanInt64));
  __ LoadRoot(rdi, Heap::kTheHoleValueRootIndex);
  // rsi: the-hole NaN
  // rdi: pointer to the-hole

  // Allocating heap numbers in the loop below can fail and cause a jump to
  // gc_required. We can't leave a partly initialized FixedArray behind,
  // so pessimistically fill it with holes now.
  Label initialization_loop, initialization_loop_entry;
  __ jmp(&initialization_loop_entry, Label::kNear);
  __ bind(&initialization_loop);
  __ movp(FieldOperand(r11, r9, times_pointer_size, FixedArray::kHeaderSize),
          rdi);
  __ bind(&initialization_loop_entry);
  __ decp(r9);
  __ j(not_sign, &initialization_loop);

  // Reload the element count (the fill loop above consumed r9).
  __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset));
  __ jmp(&entry);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  // Restore the registers saved at the top before bailing out.
  __ Pop(rax);
  __ Pop(rsi);
  __ jmp(fail);

  // Box doubles into heap numbers.
  __ bind(&loop);
  __ movq(r14, FieldOperand(r8,
                            r9,
                            times_8,
                            FixedDoubleArray::kHeaderSize));
  // r9 : current element's index
  // r14: current element
  // Compare raw bits against the hole NaN pattern held in rsi.
  __ cmpq(r14, rsi);
  __ j(equal, &convert_hole);

  // Non-hole double, copy value into a heap number.
  __ AllocateHeapNumber(rax, r15, &gc_required);
  // rax: new heap number
  __ movq(FieldOperand(rax, HeapNumber::kValueOffset), r14);
  __ movp(FieldOperand(r11,
                       r9,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          rax);
  // Write barrier for storing a heap object into the new array.
  __ movp(r15, r9);
  __ RecordWriteArray(r11,
                      rax,
                      r15,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ jmp(&entry, Label::kNear);

  // Replace the-hole NaN with the-hole pointer.
  __ bind(&convert_hole);
  __ movp(FieldOperand(r11,
                       r9,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          rdi);

  __ bind(&entry);
  __ decp(r9);
  __ j(not_sign, &loop);  // Loop until the index underflows past zero.

  // Replace receiver's backing store with newly created and filled FixedArray.
  __ movp(FieldOperand(rdx, JSObject::kElementsOffset), r11);
  __ RecordWriteField(rdx,
                      JSObject::kElementsOffset,
                      r11,
                      r15,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ Pop(rax);
  __ Pop(rsi);

  // Fast path for empty arrays falls through to here: only the map needs
  // to change, so no remembered-set entry is required.
  __ bind(&only_change_map);
  // Set transitioned map.
  __ movp(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
  __ RecordWriteField(rdx,
                      HeapObject::kMapOffset,
                      rbx,
                      rdi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
366
367
// Emits code that loads the character at |index| of |string| into
// |result|.  Indirect strings (slices and flat cons strings) are first
// unwrapped to their underlying string; the code then dispatches on
// sequential vs. external representation and one-byte vs. two-byte
// encoding.  Bails out to |call_runtime| for non-flat cons strings and
// short external strings.  Note: |index| is adjusted in place for sliced
// strings (the slice offset is added to it).
void StringCharLoadGenerator::Generate(MacroAssembler* masm,
                                       Register string,
                                       Register index,
                                       Register result,
                                       Label* call_runtime) {
  // Fetch the instance type of the receiver into result register.
  __ movp(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ testb(result, Immediate(kIsIndirectStringMask));
  __ j(zero, &check_sequential, Label::kNear);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  __ testb(result, Immediate(kSlicedNotConsMask));
  __ j(zero, &cons_string, Label::kNear);

  // Handle slices: fold the slice offset into the index and continue with
  // the parent string.
  Label indirect_string_loaded;
  __ SmiToInteger32(result, FieldOperand(string, SlicedString::kOffsetOffset));
  __ addp(index, result);
  __ movp(string, FieldOperand(string, SlicedString::kParentOffset));
  __ jmp(&indirect_string_loaded, Label::kNear);

  // Handle cons strings.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ CompareRoot(FieldOperand(string, ConsString::kSecondOffset),
                 Heap::kempty_stringRootIndex);
  __ j(not_equal, call_runtime);
  __ movp(string, FieldOperand(string, ConsString::kFirstOffset));

  // Re-fetch the instance type of the (now direct) underlying string.
  __ bind(&indirect_string_loaded);
  __ movp(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // Distinguish sequential and external strings. Only these two string
  // representations can reach here (slices and flat cons strings have been
  // reduced to the underlying sequential or external string).
  Label seq_string;
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ testb(result, Immediate(kStringRepresentationMask));
  __ j(zero, &seq_string, Label::kNear);

  // Handle external strings.
  Label one_byte_external, done;
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ testb(result, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ testb(result, Immediate(kShortExternalStringTag));
  __ j(not_zero, call_runtime);
  // Check encoding.  Note the resource-data load below does not affect
  // the flags set by the testb, so the branch after it is still valid.
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ testb(result, Immediate(kStringEncodingMask));
  __ movp(result, FieldOperand(string, ExternalString::kResourceDataOffset));
  __ j(not_equal, &one_byte_external, Label::kNear);
  // Two-byte string.
  __ movzxwl(result, Operand(result, index, times_2, 0));
  __ jmp(&done, Label::kNear);
  __ bind(&one_byte_external);
  // One-byte string.
  __ movzxbl(result, Operand(result, index, times_1, 0));
  __ jmp(&done, Label::kNear);

  // Dispatch on the encoding: one-byte or two-byte.
  Label one_byte;
  __ bind(&seq_string);
  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
  STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
  __ testb(result, Immediate(kStringEncodingMask));
  __ j(not_zero, &one_byte, Label::kNear);

  // Two-byte string.
  // Load the two-byte character code into the result register.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ movzxwl(result, FieldOperand(string,
                                  index,
                                  times_2,
                                  SeqTwoByteString::kHeaderSize));
  __ jmp(&done, Label::kNear);

  // One-byte string.
  // Load the byte into the result register.
  __ bind(&one_byte);
  __ movzxbl(result, FieldOperand(string,
                                  index,
                                  times_1,
                                  SeqOneByteString::kHeaderSize));
  __ bind(&done);
}
469
Steve Blocka7e24c12009-10-30 11:49:00 +0000470#undef __
471
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000472
// Pre-assembles the "young" code-age byte sequence into young_sequence_,
// so that later it can be compared against generated code (IsYoung) and
// copied back over aged code (CopyYoungSequenceTo).
CodeAgingHelper::CodeAgingHelper(Isolate* isolate) {
  USE(isolate);  // Consumed only by the CodePatcher constructor below.
  DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
  // The sequence of instructions that is patched out for aging code is the
  // following boilerplate stack-building prologue that is found both in
  // FUNCTION and OPTIMIZED_FUNCTION code:
  CodePatcher patcher(isolate, young_sequence_.start(),
                      young_sequence_.length());
  patcher.masm()->pushq(rbp);
  patcher.masm()->movp(rbp, rsp);
  // NOTE(review): presumably rsi/rdi hold the context and the function
  // object at prologue time on x64 — confirm against the call convention.
  patcher.masm()->Push(rsi);
  patcher.masm()->Push(rdi);
}
486
487
488#ifdef DEBUG
489bool CodeAgingHelper::IsOld(byte* candidate) const {
490 return *candidate == kCallOpcode;
491}
492#endif
493
494
495bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
496 bool result = isolate->code_aging_helper()->IsYoung(sequence);
497 DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
498 return result;
499}
500
501
502void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age,
503 MarkingParity* parity) {
504 if (IsYoungSequence(isolate, sequence)) {
505 *age = kNoAgeCodeAge;
506 *parity = NO_MARKING_PARITY;
507 } else {
508 sequence++; // Skip the kCallOpcode byte
509 Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
510 Assembler::kCallTargetAddressOffset;
511 Code* stub = GetCodeFromTargetAddress(target_address);
512 GetCodeAgeAndParity(stub, age, parity);
513 }
514}
515
516
517void Code::PatchPlatformCodeAge(Isolate* isolate,
518 byte* sequence,
519 Code::Age age,
520 MarkingParity parity) {
521 uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
522 if (age == kNoAgeCodeAge) {
523 isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000524 Assembler::FlushICache(isolate, sequence, young_length);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000525 } else {
526 Code* stub = GetCodeAgeStub(isolate, age, parity);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000527 CodePatcher patcher(isolate, sequence, young_length);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000528 patcher.masm()->call(stub->instruction_start());
529 patcher.masm()->Nop(
530 kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength);
531 }
532}
533
534
535Operand StackArgumentsAccessor::GetArgumentOperand(int index) {
536 DCHECK(index >= 0);
537 int receiver = (receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER) ? 1 : 0;
538 int displacement_to_last_argument = base_reg_.is(rsp) ?
539 kPCOnStackSize : kFPOnStackSize + kPCOnStackSize;
540 displacement_to_last_argument += extra_displacement_to_last_argument_;
541 if (argument_count_reg_.is(no_reg)) {
542 // argument[0] is at base_reg_ + displacement_to_last_argument +
543 // (argument_count_immediate_ + receiver - 1) * kPointerSize.
544 DCHECK(argument_count_immediate_ + receiver > 0);
545 return Operand(base_reg_, displacement_to_last_argument +
546 (argument_count_immediate_ + receiver - 1 - index) * kPointerSize);
547 } else {
548 // argument[0] is at base_reg_ + displacement_to_last_argument +
549 // argument_count_reg_ * times_pointer_size + (receiver - 1) * kPointerSize.
550 return Operand(base_reg_, argument_count_reg_, times_pointer_size,
551 displacement_to_last_argument + (receiver - 1 - index) * kPointerSize);
552 }
553}
554
555
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000556} // namespace internal
557} // namespace v8
Leon Clarkef7060e22010-06-03 12:02:55 +0100558
559#endif // V8_TARGET_ARCH_X64