blob: 1321461a14ffa109cfefcd6e897138415c9129cd [file] [log] [blame]
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4
5#include "src/v8.h"
6
7#if V8_TARGET_ARCH_X87
8
9#include "src/codegen.h"
10#include "src/heap/heap.h"
11#include "src/macro-assembler.h"
12
13namespace v8 {
14namespace internal {
15

// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.
19
// Enters an INTERNAL frame before a runtime call made on behalf of a stub
// and marks the assembler as having a frame. NOTE(review): the DCHECK fires
// after EnterFrame; it guards the has_frame flag (not yet set), not the
// physical frame itself.
void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterFrame(StackFrame::INTERNAL);
  DCHECK(!masm->has_frame());
  masm->set_has_frame(true);
}
25
26
// Leaves the INTERNAL frame created by BeforeCall and clears the
// assembler's has_frame flag. Must mirror BeforeCall exactly.
void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveFrame(StackFrame::INTERNAL);
  DCHECK(masm->has_frame());
  masm->set_has_frame(false);
}
32
33
#define __ masm.


// Returns the function used for Math.exp. The x87 port has no SSE2
// support, so no specialized stub is generated; the C library
// implementation is returned instead.
UnaryMathFunction CreateExpFunction() {
  // No SSE2 support
  return &std::exp;
}
41
42
// Returns the function used for Math.sqrt. As with exp, the x87 port has
// no SSE2 fast path and falls back to the C library implementation.
UnaryMathFunction CreateSqrtFunction() {
  // No SSE2 support
  return &std::sqrt;
}
47
48
// Helper functions for CreateMemMoveFunction.
#undef __
#define __ ACCESS_MASM(masm)

// Copy direction / alignment descriptors. NOTE(review): kept from the ia32
// version; the simple x87 copiers below do not reference them.
enum Direction { FORWARD, BACKWARD };
enum Alignment { MOVE_ALIGNED, MOVE_UNALIGNED };


// Shared memmove epilogue: restores the callee-saved registers pushed in
// the prologue (esi, edi) and returns to the C caller.
void MemMoveEmitPopAndReturn(MacroAssembler* masm) {
  __ pop(esi);
  __ pop(edi);
  __ ret(0);
}
62
63
64#undef __
65#define __ masm.
66
67
68class LabelConverter {
69 public:
70 explicit LabelConverter(byte* buffer) : buffer_(buffer) {}
71 int32_t address(Label* l) const {
72 return reinterpret_cast<int32_t>(buffer_) + l->pos();
73 }
74 private:
75 byte* buffer_;
76};
77
78
79MemMoveFunction CreateMemMoveFunction() {
80 size_t actual_size;
81 // Allocate buffer in executable space.
82 byte* buffer =
83 static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
84 if (buffer == NULL) return NULL;
85 MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
86 LabelConverter conv(buffer);
87
88 // Generated code is put into a fixed, unmovable buffer, and not into
89 // the V8 heap. We can't, and don't, refer to any relocatable addresses
90 // (e.g. the JavaScript nan-object).
91
92 // 32-bit C declaration function calls pass arguments on stack.
93
94 // Stack layout:
95 // esp[12]: Third argument, size.
96 // esp[8]: Second argument, source pointer.
97 // esp[4]: First argument, destination pointer.
98 // esp[0]: return address
99
100 const int kDestinationOffset = 1 * kPointerSize;
101 const int kSourceOffset = 2 * kPointerSize;
102 const int kSizeOffset = 3 * kPointerSize;
103
104 int stack_offset = 0; // Update if we change the stack height.
105
106 Label backward, backward_much_overlap;
107 Label forward_much_overlap, small_size, medium_size, pop_and_return;
108 __ push(edi);
109 __ push(esi);
110 stack_offset += 2 * kPointerSize;
111 Register dst = edi;
112 Register src = esi;
113 Register count = ecx;
114 __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
115 __ mov(src, Operand(esp, stack_offset + kSourceOffset));
116 __ mov(count, Operand(esp, stack_offset + kSizeOffset));
117
118 __ cmp(dst, src);
119 __ j(equal, &pop_and_return);
120
121 // No SSE2.
122 Label forward;
123 __ cmp(count, 0);
124 __ j(equal, &pop_and_return);
125 __ cmp(dst, src);
126 __ j(above, &backward);
127 __ jmp(&forward);
128 {
129 // Simple forward copier.
130 Label forward_loop_1byte, forward_loop_4byte;
131 __ bind(&forward_loop_4byte);
132 __ mov(eax, Operand(src, 0));
133 __ sub(count, Immediate(4));
134 __ add(src, Immediate(4));
135 __ mov(Operand(dst, 0), eax);
136 __ add(dst, Immediate(4));
137 __ bind(&forward); // Entry point.
138 __ cmp(count, 3);
139 __ j(above, &forward_loop_4byte);
140 __ bind(&forward_loop_1byte);
141 __ cmp(count, 0);
142 __ j(below_equal, &pop_and_return);
143 __ mov_b(eax, Operand(src, 0));
144 __ dec(count);
145 __ inc(src);
146 __ mov_b(Operand(dst, 0), eax);
147 __ inc(dst);
148 __ jmp(&forward_loop_1byte);
149 }
150 {
151 // Simple backward copier.
152 Label backward_loop_1byte, backward_loop_4byte, entry_shortcut;
153 __ bind(&backward);
154 __ add(src, count);
155 __ add(dst, count);
156 __ cmp(count, 3);
157 __ j(below_equal, &entry_shortcut);
158
159 __ bind(&backward_loop_4byte);
160 __ sub(src, Immediate(4));
161 __ sub(count, Immediate(4));
162 __ mov(eax, Operand(src, 0));
163 __ sub(dst, Immediate(4));
164 __ mov(Operand(dst, 0), eax);
165 __ cmp(count, 3);
166 __ j(above, &backward_loop_4byte);
167 __ bind(&backward_loop_1byte);
168 __ cmp(count, 0);
169 __ j(below_equal, &pop_and_return);
170 __ bind(&entry_shortcut);
171 __ dec(src);
172 __ dec(count);
173 __ mov_b(eax, Operand(src, 0));
174 __ dec(dst);
175 __ mov_b(Operand(dst, 0), eax);
176 __ jmp(&backward_loop_1byte);
177 }
178
179 __ bind(&pop_and_return);
180 MemMoveEmitPopAndReturn(&masm);
181
182 CodeDesc desc;
183 masm.GetCode(&desc);
184 DCHECK(!RelocInfo::RequiresRelocation(desc));
185 CpuFeatures::FlushICache(buffer, actual_size);
186 base::OS::ProtectCode(buffer, actual_size);
187 // TODO(jkummerow): It would be nice to register this code creation event
188 // with the PROFILE / GDBJIT system.
189 return FUNCTION_CAST<MemMoveFunction>(buffer);
190}
191
192
#undef __

// -------------------------------------------------------------------------
// Code generators

#define __ ACCESS_MASM(masm)


// Emits code for an elements transition that needs no backing-store change:
// only the receiver's map is replaced (with a write barrier). If allocation
// mementos are tracked, control escapes to |allocation_memento_found| when
// the receiver has one.
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* allocation_memento_found) {
  // edi is used as scratch and must not alias any of the inputs.
  Register scratch = edi;
  DCHECK(!AreAliased(receiver, key, value, target_map, scratch));

  if (mode == TRACK_ALLOCATION_SITE) {
    DCHECK(allocation_memento_found != NULL);
    __ JumpIfJSArrayHasAllocationMemento(
        receiver, scratch, allocation_memento_found);
  }

  // Set transitioned map.
  __ mov(FieldOperand(receiver, HeapObject::kMapOffset), target_map);
  __ RecordWriteField(receiver, HeapObject::kMapOffset, target_map, scratch,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}
223
224
// Emits code that converts the receiver's FixedArray of smis into a newly
// allocated FixedDoubleArray and installs it as the backing store, then
// updates the map. Jumps to |fail| if GC is required or (when tracking) an
// allocation memento is found. Fixed register contract, asserted below:
// receiver=edx, key=ecx, value=eax, target_map=ebx; edi/esi are clobbered
// and esi is restored from the frame's context slot at the end.
void ElementsTransitionGenerator::GenerateSmiToDouble(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Return address is on the stack.
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));
  DCHECK(value.is(eax));
  DCHECK(target_map.is(ebx));

  Label loop, entry, convert_hole, gc_required, only_change_map;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
  __ j(equal, &only_change_map);

  // Save value (eax) and target map (ebx); both are clobbered below and
  // needed again after the conversion loop.
  __ push(eax);
  __ push(ebx);

  __ mov(edi, FieldOperand(edi, FixedArray::kLengthOffset));

  // Allocate new FixedDoubleArray.
  // edx: receiver
  // edi: length of source FixedArray (smi-tagged)
  AllocationFlags flags =
      static_cast<AllocationFlags>(TAG_OBJECT | DOUBLE_ALIGNMENT);
  __ Allocate(FixedDoubleArray::kHeaderSize, times_8, edi,
              REGISTER_VALUE_IS_SMI, eax, ebx, no_reg, &gc_required, flags);

  // eax: destination FixedDoubleArray
  // edi: number of elements
  // edx: receiver
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_double_array_map()));
  __ mov(FieldOperand(eax, FixedDoubleArray::kLengthOffset), edi);
  __ mov(esi, FieldOperand(edx, JSObject::kElementsOffset));
  // Replace receiver's backing store with newly created FixedDoubleArray.
  __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
  __ mov(ebx, eax);
  __ RecordWriteField(edx, JSObject::kElementsOffset, ebx, edi, kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

  // Reload the (smi-tagged) length as the loop counter.
  __ mov(edi, FieldOperand(esi, FixedArray::kLengthOffset));

  // Prepare for conversion loop.
  ExternalReference canonical_the_hole_nan_reference =
      ExternalReference::address_of_the_hole_nan();
  __ jmp(&entry);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  // Restore registers before jumping into runtime.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ pop(ebx);
  __ pop(eax);
  __ jmp(fail);

  // Convert and copy elements
  // esi: source FixedArray
  __ bind(&loop);
  __ mov(ebx, FieldOperand(esi, edi, times_2, FixedArray::kHeaderSize));
  // ebx: current element from source
  // edi: index of current element
  __ JumpIfNotSmi(ebx, &convert_hole);

  // Normal smi, convert it to double and store.
  // x87: route the integer through memory to load it onto the FPU stack.
  __ SmiUntag(ebx);
  __ push(ebx);
  __ fild_s(Operand(esp, 0));
  __ pop(ebx);
  __ fstp_d(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize));
  __ jmp(&entry);

  // Found hole, store hole_nan_as_double instead.
  __ bind(&convert_hole);

  if (FLAG_debug_code) {
    // Only the hole may appear as a non-smi in a smi-only array.
    __ cmp(ebx, masm->isolate()->factory()->the_hole_value());
    __ Assert(equal, kObjectFoundInSmiOnlyArray);
  }

  __ fld_d(Operand::StaticVariable(canonical_the_hole_nan_reference));
  __ fstp_d(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize));

  __ bind(&entry);
  // Decrement the smi-tagged index; the loop ends when it goes negative.
  __ sub(edi, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &loop);

  // Restore saved target map (ebx) and value (eax).
  __ pop(ebx);
  __ pop(eax);

  // Restore esi.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  __ bind(&only_change_map);
  // eax: value
  // ebx: target map
  // Set transitioned map.
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx, HeapObject::kMapOffset, ebx, edi, kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}
337
338
// Emits code that converts the receiver's FixedDoubleArray into a newly
// allocated FixedArray, boxing each double into a heap number (holes map to
// the hole sentinel), then installs the new store and map. Jumps to |fail|
// if GC is required or (when tracking) a memento is found. Fixed register
// contract, asserted below: receiver=edx, key=ecx, value=eax, target_map=ebx.
void ElementsTransitionGenerator::GenerateDoubleToObject(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Return address is on the stack.
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));
  DCHECK(value.is(eax));
  DCHECK(target_map.is(ebx));

  Label loop, entry, convert_hole, gc_required, only_change_map, success;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
  __ j(equal, &only_change_map);

  // Save value, receiver and target map; all are clobbered below.
  __ push(eax);
  __ push(edx);
  __ push(ebx);

  __ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));

  // Allocate new FixedArray.
  // ebx: length of source FixedDoubleArray (smi-tagged)
  __ lea(edi, Operand(ebx, times_2, FixedArray::kHeaderSize));
  __ Allocate(edi, eax, esi, no_reg, &gc_required, TAG_OBJECT);

  // eax: destination FixedArray
  // ebx: number of elements
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(eax, FixedArray::kLengthOffset), ebx);
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));

  // Allocating heap numbers in the loop below can fail and cause a jump to
  // gc_required. We can't leave a partly initialized FixedArray behind,
  // so pessimistically fill it with holes now.
  Label initialization_loop, initialization_loop_entry;
  __ jmp(&initialization_loop_entry, Label::kNear);
  __ bind(&initialization_loop);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize),
         masm->isolate()->factory()->the_hole_value());
  __ bind(&initialization_loop_entry);
  __ sub(ebx, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &initialization_loop);

  // Reload the length; ebx was consumed as the fill-loop counter.
  __ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));
  __ jmp(&entry);

  // ebx: target map
  // edx: receiver
  // Set transitioned map.
  __ bind(&only_change_map);
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx, HeapObject::kMapOffset, ebx, edi, kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ jmp(&success);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ pop(ebx);
  __ pop(edx);
  __ pop(eax);
  __ jmp(fail);

  // Box doubles into heap numbers.
  // edi: source FixedDoubleArray
  // eax: destination FixedArray
  __ bind(&loop);
  // ebx: index of current element (smi-tagged)
  // A hole is detected by its upper 32 bits; |offset| addresses them.
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ cmp(FieldOperand(edi, ebx, times_4, offset), Immediate(kHoleNanUpper32));
  __ j(equal, &convert_hole);

  // Non-hole double, copy value into a heap number.
  __ AllocateHeapNumber(edx, esi, no_reg, &gc_required);
  // edx: new heap number
  // Copy the double as two 32-bit words (low, then high).
  __ mov(esi, FieldOperand(edi, ebx, times_4, FixedDoubleArray::kHeaderSize));
  __ mov(FieldOperand(edx, HeapNumber::kValueOffset), esi);
  __ mov(esi, FieldOperand(edi, ebx, times_4, offset));
  __ mov(FieldOperand(edx, HeapNumber::kValueOffset + kPointerSize), esi);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize), edx);
  __ mov(esi, ebx);
  __ RecordWriteArray(eax, edx, esi, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ jmp(&entry, Label::kNear);

  // Replace the-hole NaN with the-hole pointer.
  __ bind(&convert_hole);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize),
         masm->isolate()->factory()->the_hole_value());

  __ bind(&entry);
  // Decrement the smi-tagged index; the loop ends when it goes negative.
  __ sub(ebx, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &loop);

  __ pop(ebx);
  __ pop(edx);
  // ebx: target map
  // edx: receiver
  // Set transitioned map.
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx, HeapObject::kMapOffset, ebx, edi, kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  // Replace receiver's backing store with newly created and filled FixedArray.
  __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
  __ RecordWriteField(edx, JSObject::kElementsOffset, eax, edi, kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

  // Restore registers.
  __ pop(eax);
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  __ bind(&success);
}
465
466
// Emits code that loads the character at |index| from |string| into
// |result|. Sequential and external one-/two-byte strings are handled
// inline; slices and flat cons strings are first reduced to their
// underlying string. Bails out to |call_runtime| for non-flat cons strings
// and short external strings. |string| and |index| are clobbered.
void StringCharLoadGenerator::Generate(MacroAssembler* masm,
                                       Factory* factory,
                                       Register string,
                                       Register index,
                                       Register result,
                                       Label* call_runtime) {
  // Fetch the instance type of the receiver into result register.
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ test(result, Immediate(kIsIndirectStringMask));
  __ j(zero, &check_sequential, Label::kNear);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  __ test(result, Immediate(kSlicedNotConsMask));
  __ j(zero, &cons_string, Label::kNear);

  // Handle slices: shift the index by the slice offset and continue with
  // the parent string.
  Label indirect_string_loaded;
  __ mov(result, FieldOperand(string, SlicedString::kOffsetOffset));
  __ SmiUntag(result);
  __ add(index, result);
  __ mov(string, FieldOperand(string, SlicedString::kParentOffset));
  __ jmp(&indirect_string_loaded, Label::kNear);

  // Handle cons strings.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ cmp(FieldOperand(string, ConsString::kSecondOffset),
         Immediate(factory->empty_string()));
  __ j(not_equal, call_runtime);
  __ mov(string, FieldOperand(string, ConsString::kFirstOffset));

  __ bind(&indirect_string_loaded);
  // Re-fetch the instance type of the underlying string.
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // Distinguish sequential and external strings. Only these two string
  // representations can reach here (slices and flat cons strings have been
  // reduced to the underlying sequential or external string).
  Label seq_string;
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test(result, Immediate(kStringRepresentationMask));
  __ j(zero, &seq_string, Label::kNear);

  // Handle external strings.
  Label one_byte_external, done;
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ test(result, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ test_b(result, kShortExternalStringMask);
  __ j(not_zero, call_runtime);
  // Check encoding.
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // The mov below does not affect EFLAGS, so the j(not_equal) still tests
  // the result of this test_b.
  __ test_b(result, kStringEncodingMask);
  __ mov(result, FieldOperand(string, ExternalString::kResourceDataOffset));
  __ j(not_equal, &one_byte_external, Label::kNear);
  // Two-byte string.
  __ movzx_w(result, Operand(result, index, times_2, 0));
  __ jmp(&done, Label::kNear);
  __ bind(&one_byte_external);
  // One-byte string.
  __ movzx_b(result, Operand(result, index, times_1, 0));
  __ jmp(&done, Label::kNear);

  // Dispatch on the encoding: one-byte or two-byte.
  Label one_byte;
  __ bind(&seq_string);
  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
  STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
  __ test(result, Immediate(kStringEncodingMask));
  __ j(not_zero, &one_byte, Label::kNear);

  // Two-byte string.
  // Load the two-byte character code into the result register.
  __ movzx_w(result, FieldOperand(string,
                                  index,
                                  times_2,
                                  SeqTwoByteString::kHeaderSize));
  __ jmp(&done, Label::kNear);

  // One-byte string.
  // Load the byte into the result register.
  __ bind(&one_byte);
  __ movzx_b(result, FieldOperand(string,
                                  index,
                                  times_1,
                                  SeqOneByteString::kHeaderSize));
  __ bind(&done);
}
569
570
#undef __


// Emits the canonical "young" code prologue (push ebp; mov ebp, esp;
// push esi; push edi) into the young_sequence_ buffer. Code aging later
// compares function prologues against this sequence and patches it.
CodeAgingHelper::CodeAgingHelper() {
  DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
  CodePatcher patcher(young_sequence_.start(), young_sequence_.length());
  patcher.masm()->push(ebp);
  patcher.masm()->mov(ebp, esp);
  patcher.masm()->push(esi);
  patcher.masm()->push(edi);
}
582
583
#ifdef DEBUG
// A sequence is "old" if it starts with a call opcode — i.e. the young
// prologue has been replaced by a call to a code-age stub.
bool CodeAgingHelper::IsOld(byte* candidate) const {
  return *candidate == kCallOpcode;
}
#endif
589
590
591bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
592 bool result = isolate->code_aging_helper()->IsYoung(sequence);
593 DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
594 return result;
595}
596
597
// Decodes the age and marking parity of a code object from its prologue.
// Young code keeps the original prologue; aged code starts with a call to
// an age-specific stub, and the age is recovered from that stub.
void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age,
                               MarkingParity* parity) {
  if (IsYoungSequence(isolate, sequence)) {
    *age = kNoAgeCodeAge;
    *parity = NO_MARKING_PARITY;
  } else {
    sequence++;  // Skip the kCallOpcode byte
    // The call target is encoded pc-relative; reconstruct the absolute
    // address of the age stub from the 32-bit displacement.
    Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
        Assembler::kCallTargetAddressOffset;
    Code* stub = GetCodeFromTargetAddress(target_address);
    // Delegate to the overload that maps an age stub to (age, parity).
    GetCodeAgeAndParity(stub, age, parity);
  }
}
611
612
// Rewrites the code-age prologue of |sequence|. Making code young copies
// the canonical young prologue back (and flushes the icache); aging it
// patches in a call to the stub for the requested age/parity.
void Code::PatchPlatformCodeAge(Isolate* isolate,
                                byte* sequence,
                                Code::Age age,
                                MarkingParity parity) {
  uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
  if (age == kNoAgeCodeAge) {
    isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
    CpuFeatures::FlushICache(sequence, young_length);
  } else {
    Code* stub = GetCodeAgeStub(isolate, age, parity);
    CodePatcher patcher(sequence, young_length);
    patcher.masm()->call(stub->instruction_start(), RelocInfo::NONE32);
  }
}
627
628
629} } // namespace v8::internal
630
631#endif // V8_TARGET_ARCH_X87