// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/ia32/codegen-ia32.h"

#if V8_TARGET_ARCH_IA32

#include "src/codegen.h"
#include "src/heap/heap.h"
#include "src/macro-assembler.h"

namespace v8 {
namespace internal {


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterFrame(StackFrame::INTERNAL);
  DCHECK(!masm->has_frame());
  masm->set_has_frame(true);
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveFrame(StackFrame::INTERNAL);
  DCHECK(masm->has_frame());
  masm->set_has_frame(false);
}


#define __ masm.


UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
  size_t actual_size;
  // Allocate buffer in executable space.
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;
  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);
  // esp[1 * kPointerSize]: raw double input
  // esp[0 * kPointerSize]: return address
  // Move double input into registers.
  {
    __ movsd(xmm0, Operand(esp, 1 * kPointerSize));
    __ sqrtsd(xmm0, xmm0);
    __ movsd(Operand(esp, 1 * kPointerSize), xmm0);
    // Load result into floating point register as return value.
    __ fld_d(Operand(esp, 1 * kPointerSize));
    __ Ret();
  }

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
}
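
// Usage sketch (illustrative, not from this file): the returned stub is
// called like a plain C function. Assuming the UnaryMathFunctionWithIsolate
// typedef in src/codegen.h is double(double, Isolate*), a caller would do:
//
//   UnaryMathFunctionWithIsolate fast_sqrt = CreateSqrtFunction(isolate);
//   double root = (fast_sqrt != nullptr) ? fast_sqrt(2.0, isolate)
//                                        : std::sqrt(2.0);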


// Helper functions for CreateMemMoveFunction.
#undef __
#define __ ACCESS_MASM(masm)

enum Direction { FORWARD, BACKWARD };
enum Alignment { MOVE_ALIGNED, MOVE_UNALIGNED };

// Expects registers:
// esi - source, aligned if alignment == MOVE_ALIGNED
// edi - destination, always aligned
// ecx - count (copy size in bytes)
// edx - loop count (number of 64 byte chunks)
void MemMoveEmitMainLoop(MacroAssembler* masm,
                         Label* move_last_15,
                         Direction direction,
                         Alignment alignment) {
  Register src = esi;
  Register dst = edi;
  Register count = ecx;
  Register loop_count = edx;
  Label loop, move_last_31, move_last_63;
  __ cmp(loop_count, 0);
  __ j(equal, &move_last_63);
  __ bind(&loop);
  // Main loop. Copy in 64 byte chunks.
  if (direction == BACKWARD) __ sub(src, Immediate(0x40));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
  __ movdq(alignment == MOVE_ALIGNED, xmm2, Operand(src, 0x20));
  __ movdq(alignment == MOVE_ALIGNED, xmm3, Operand(src, 0x30));
  if (direction == FORWARD) __ add(src, Immediate(0x40));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x40));
  __ movdqa(Operand(dst, 0x00), xmm0);
  __ movdqa(Operand(dst, 0x10), xmm1);
  __ movdqa(Operand(dst, 0x20), xmm2);
  __ movdqa(Operand(dst, 0x30), xmm3);
  if (direction == FORWARD) __ add(dst, Immediate(0x40));
  __ dec(loop_count);
  __ j(not_zero, &loop);
  // At most 63 bytes left to copy.
  __ bind(&move_last_63);
  __ test(count, Immediate(0x20));
  __ j(zero, &move_last_31);
  if (direction == BACKWARD) __ sub(src, Immediate(0x20));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
  if (direction == FORWARD) __ add(src, Immediate(0x20));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x20));
  __ movdqa(Operand(dst, 0x00), xmm0);
  __ movdqa(Operand(dst, 0x10), xmm1);
  if (direction == FORWARD) __ add(dst, Immediate(0x20));
  // At most 31 bytes left to copy.
  __ bind(&move_last_31);
  __ test(count, Immediate(0x10));
  __ j(zero, move_last_15);
  if (direction == BACKWARD) __ sub(src, Immediate(0x10));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0));
  if (direction == FORWARD) __ add(src, Immediate(0x10));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x10));
  __ movdqa(Operand(dst, 0), xmm0);
  if (direction == FORWARD) __ add(dst, Immediate(0x10));
}


void MemMoveEmitPopAndReturn(MacroAssembler* masm) {
  __ pop(esi);
  __ pop(edi);
  __ ret(0);
}
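
// MemMoveEmitPopAndReturn() is the shared epilogue for every exit path of
// the memmove stub below: it restores the callee-saved esi and edi pushed
// by CreateMemMoveFunction() on entry, then returns to the C caller.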


#undef __
#define __ masm.


class LabelConverter {
 public:
  explicit LabelConverter(byte* buffer) : buffer_(buffer) {}
  int32_t address(Label* l) const {
    return reinterpret_cast<int32_t>(buffer_) + l->pos();
  }
 private:
  byte* buffer_;
};
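
// The generated stub lives in a fixed, non-relocatable buffer, so a label's
// absolute address is simply buffer start plus label offset. This is what
// lets CreateMemMoveFunction() emit raw 32-bit jump tables with __ dd(...)
// and dispatch through them with an indirect jump, e.g.:
//
//   __ mov(eax, Operand(eax, times_4, conv.address(&medium_handlers)));
//   __ jmp(eax);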


MemMoveFunction CreateMemMoveFunction(Isolate* isolate) {
  size_t actual_size;
  // Allocate buffer in executable space.
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;
  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);
  LabelConverter conv(buffer);

  // Generated code is put into a fixed, unmovable buffer, and not into
  // the V8 heap. We can't, and don't, refer to any relocatable addresses
  // (e.g. the JavaScript nan-object).

  // 32-bit C declaration function calls pass arguments on stack.

  // Stack layout:
  // esp[12]: Third argument, size.
  // esp[8]: Second argument, source pointer.
  // esp[4]: First argument, destination pointer.
  // esp[0]: return address

  const int kDestinationOffset = 1 * kPointerSize;
  const int kSourceOffset = 2 * kPointerSize;
  const int kSizeOffset = 3 * kPointerSize;

  // When copying up to this many bytes, use special "small" handlers.
  const size_t kSmallCopySize = 8;
  // When copying up to this many bytes, use special "medium" handlers.
  const size_t kMediumCopySize = 63;
  // When the non-overlapping distance between src and dst is smaller than
  // this, use a more careful implementation (slightly slower).
  const size_t kMinMoveDistance = 16;
  // Note that these values are dictated by the implementation below;
  // do not just change them and hope things will work!
188
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100189 int stack_offset = 0; // Update if we change the stack height.
190
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000191 Label backward, backward_much_overlap;
192 Label forward_much_overlap, small_size, medium_size, pop_and_return;
193 __ push(edi);
194 __ push(esi);
195 stack_offset += 2 * kPointerSize;
196 Register dst = edi;
197 Register src = esi;
198 Register count = ecx;
199 Register loop_count = edx;
200 __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
201 __ mov(src, Operand(esp, stack_offset + kSourceOffset));
202 __ mov(count, Operand(esp, stack_offset + kSizeOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100203
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000204 __ cmp(dst, src);
205 __ j(equal, &pop_and_return);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100206
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000207 __ prefetch(Operand(src, 0), 1);
208 __ cmp(count, kSmallCopySize);
209 __ j(below_equal, &small_size);
210 __ cmp(count, kMediumCopySize);
211 __ j(below_equal, &medium_size);
212 __ cmp(dst, src);
213 __ j(above, &backward);
214
215 {
216 // |dst| is a lower address than |src|. Copy front-to-back.
217 Label unaligned_source, move_last_15, skip_last_move;
218 __ mov(eax, src);
219 __ sub(eax, dst);
220 __ cmp(eax, kMinMoveDistance);
221 __ j(below, &forward_much_overlap);
222 // Copy first 16 bytes.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100223 __ movdqu(xmm0, Operand(src, 0));
224 __ movdqu(Operand(dst, 0), xmm0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000225 // Determine distance to alignment: 16 - (dst & 0xF).
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100226 __ mov(edx, dst);
227 __ and_(edx, 0xF);
228 __ neg(edx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100229 __ add(edx, Immediate(16));
230 __ add(dst, edx);
231 __ add(src, edx);
232 __ sub(count, edx);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000233 // dst is now aligned. Main copy loop.
234 __ mov(loop_count, count);
235 __ shr(loop_count, 6);
236 // Check if src is also aligned.
237 __ test(src, Immediate(0xF));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100238 __ j(not_zero, &unaligned_source);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000239 // Copy loop for aligned source and destination.
240 MemMoveEmitMainLoop(&masm, &move_last_15, FORWARD, MOVE_ALIGNED);
    // At most 15 bytes to copy. Copy 16 bytes at the end of the region.
    __ bind(&move_last_15);
    __ and_(count, 0xF);
    __ j(zero, &skip_last_move, Label::kNear);
    __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
    __ bind(&skip_last_move);
    MemMoveEmitPopAndReturn(&masm);

    // Copy loop for unaligned source and aligned destination.
    __ bind(&unaligned_source);
    MemMoveEmitMainLoop(&masm, &move_last_15, FORWARD, MOVE_UNALIGNED);
    __ jmp(&move_last_15);

    // Less than kMinMoveDistance offset between dst and src.
    Label loop_until_aligned, last_15_much_overlap;
    __ bind(&loop_until_aligned);
    __ mov_b(eax, Operand(src, 0));
    __ inc(src);
    __ mov_b(Operand(dst, 0), eax);
    __ inc(dst);
    __ dec(count);
    __ bind(&forward_much_overlap);  // Entry point into this block.
    __ test(dst, Immediate(0xF));
    __ j(not_zero, &loop_until_aligned);
    // dst is now aligned, src can't be. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    MemMoveEmitMainLoop(&masm, &last_15_much_overlap,
                        FORWARD, MOVE_UNALIGNED);
    __ bind(&last_15_much_overlap);
    __ and_(count, 0xF);
    __ j(zero, &pop_and_return);
    __ cmp(count, kSmallCopySize);
    __ j(below_equal, &small_size);
    __ jmp(&medium_size);
  }

  {
    // |dst| is a higher address than |src|. Copy backwards.
    Label unaligned_source, move_first_15, skip_last_move;
    __ bind(&backward);
    // |dst| and |src| always point to the end of what's left to copy.
    __ add(dst, count);
    __ add(src, count);
    __ mov(eax, dst);
    __ sub(eax, src);
    __ cmp(eax, kMinMoveDistance);
    __ j(below, &backward_much_overlap);
    // Copy last 16 bytes.
    __ movdqu(xmm0, Operand(src, -0x10));
    __ movdqu(Operand(dst, -0x10), xmm0);
    // Find distance to alignment: dst & 0xF.
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ sub(dst, edx);
    __ sub(src, edx);
    __ sub(count, edx);
    // dst is now aligned. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    // Check if src is also aligned.
    __ test(src, Immediate(0xF));
    __ j(not_zero, &unaligned_source);
    // Copy loop for aligned source and destination.
    MemMoveEmitMainLoop(&masm, &move_first_15, BACKWARD, MOVE_ALIGNED);
    // At most 15 bytes to copy. Copy 16 bytes at the beginning of the region.
    __ bind(&move_first_15);
    __ and_(count, 0xF);
    __ j(zero, &skip_last_move, Label::kNear);
    __ sub(src, count);
    __ sub(dst, count);
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    __ bind(&skip_last_move);
    MemMoveEmitPopAndReturn(&masm);

    // Copy loop for unaligned source and aligned destination.
    __ bind(&unaligned_source);
    MemMoveEmitMainLoop(&masm, &move_first_15, BACKWARD, MOVE_UNALIGNED);
    __ jmp(&move_first_15);

    // Less than kMinMoveDistance offset between dst and src.
    Label loop_until_aligned, first_15_much_overlap;
    __ bind(&loop_until_aligned);
    __ dec(src);
    __ dec(dst);
    __ mov_b(eax, Operand(src, 0));
    __ mov_b(Operand(dst, 0), eax);
    __ dec(count);
    __ bind(&backward_much_overlap);  // Entry point into this block.
    __ test(dst, Immediate(0xF));
    __ j(not_zero, &loop_until_aligned);
    // dst is now aligned, src can't be. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    MemMoveEmitMainLoop(&masm, &first_15_much_overlap,
                        BACKWARD, MOVE_UNALIGNED);
    __ bind(&first_15_much_overlap);
    __ and_(count, 0xF);
    __ j(zero, &pop_and_return);
    // Small/medium handlers expect dst/src to point to the beginning.
    __ sub(dst, count);
    __ sub(src, count);
    __ cmp(count, kSmallCopySize);
    __ j(below_equal, &small_size);
    __ jmp(&medium_size);
  }
  {
    // Special handlers for 9 <= copy_size < 64. No assumptions about
    // alignment or move distance, so all reads must be unaligned and
    // must happen before any writes.
    Label medium_handlers, f9_16, f17_32, f33_48, f49_63;

    __ bind(&f9_16);
    __ movsd(xmm0, Operand(src, 0));
    __ movsd(xmm1, Operand(src, count, times_1, -8));
    __ movsd(Operand(dst, 0), xmm0);
    __ movsd(Operand(dst, count, times_1, -8), xmm1);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f17_32);
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(xmm1, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm1);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f33_48);
    __ movdqu(xmm0, Operand(src, 0x00));
    __ movdqu(xmm1, Operand(src, 0x10));
    __ movdqu(xmm2, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, 0x10), xmm1);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm2);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f49_63);
    __ movdqu(xmm0, Operand(src, 0x00));
    __ movdqu(xmm1, Operand(src, 0x10));
    __ movdqu(xmm2, Operand(src, 0x20));
    __ movdqu(xmm3, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, 0x10), xmm1);
    __ movdqu(Operand(dst, 0x20), xmm2);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm3);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&medium_handlers);
    __ dd(conv.address(&f9_16));
    __ dd(conv.address(&f17_32));
    __ dd(conv.address(&f33_48));
    __ dd(conv.address(&f49_63));

    __ bind(&medium_size);  // Entry point into this block.
    __ mov(eax, count);
    __ dec(eax);
    __ shr(eax, 4);
    if (FLAG_debug_code) {
      Label ok;
      __ cmp(eax, 3);
      __ j(below_equal, &ok);
      __ int3();
      __ bind(&ok);
    }
    __ mov(eax, Operand(eax, times_4, conv.address(&medium_handlers)));
    __ jmp(eax);
  }
  {
    // Specialized copiers for copy_size <= 8 bytes.
    Label small_handlers, f0, f1, f2, f3, f4, f5_8;
    __ bind(&f0);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f1);
    __ mov_b(eax, Operand(src, 0));
    __ mov_b(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f2);
    __ mov_w(eax, Operand(src, 0));
    __ mov_w(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f3);
    __ mov_w(eax, Operand(src, 0));
    __ mov_b(edx, Operand(src, 2));
    __ mov_w(Operand(dst, 0), eax);
    __ mov_b(Operand(dst, 2), edx);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f4);
    __ mov(eax, Operand(src, 0));
    __ mov(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f5_8);
    __ mov(eax, Operand(src, 0));
    __ mov(edx, Operand(src, count, times_1, -4));
    __ mov(Operand(dst, 0), eax);
    __ mov(Operand(dst, count, times_1, -4), edx);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&small_handlers);
    __ dd(conv.address(&f0));
    __ dd(conv.address(&f1));
    __ dd(conv.address(&f2));
    __ dd(conv.address(&f3));
    __ dd(conv.address(&f4));
    __ dd(conv.address(&f5_8));
    __ dd(conv.address(&f5_8));
    __ dd(conv.address(&f5_8));
    __ dd(conv.address(&f5_8));

    __ bind(&small_size);  // Entry point into this block.
    if (FLAG_debug_code) {
      Label ok;
      __ cmp(count, 8);
      __ j(below_equal, &ok);
      __ int3();
      __ bind(&ok);
    }
    __ mov(eax, Operand(count, times_4, conv.address(&small_handlers)));
    __ jmp(eax);
  }

  __ bind(&pop_and_return);
  MemMoveEmitPopAndReturn(&masm);

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(!RelocInfo::RequiresRelocation(desc));
  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  // TODO(jkummerow): It would be nice to register this code creation event
  // with the PROFILE / GDBJIT system.
  return FUNCTION_CAST<MemMoveFunction>(buffer);
}
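
// Usage sketch (illustrative, not from this file), assuming the
// MemMoveFunction typedef in src/codegen.h is
// void(void* dest, const void* src, size_t size):
//
//   MemMoveFunction mem_move = CreateMemMoveFunction(isolate);
//   if (mem_move != nullptr) mem_move(dst_buf, src_buf, n);
//
// Like memmove(), overlapping regions are allowed; that is why the stub
// selects a forward or backward copy direction based on dst versus src.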


#undef __

// -------------------------------------------------------------------------
// Code generators

#define __ ACCESS_MASM(masm)


void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* allocation_memento_found) {
  Register scratch = edi;
  DCHECK(!AreAliased(receiver, key, value, target_map, scratch));

  if (mode == TRACK_ALLOCATION_SITE) {
    DCHECK(allocation_memento_found != NULL);
    __ JumpIfJSArrayHasAllocationMemento(
        receiver, scratch, allocation_memento_found);
  }

  // Set transitioned map.
  __ mov(FieldOperand(receiver, HeapObject::kMapOffset), target_map);
  __ RecordWriteField(receiver,
                      HeapObject::kMapOffset,
                      target_map,
                      scratch,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
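
// The RecordWriteField() call above is the GC write barrier: storing a heap
// pointer into the map slot must be reported to the incremental marker and,
// via the remembered set, to the scavenger.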


void ElementsTransitionGenerator::GenerateSmiToDouble(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Return address is on the stack.
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));
  DCHECK(value.is(eax));
  DCHECK(target_map.is(ebx));

  Label loop, entry, convert_hole, gc_required, only_change_map;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
  __ j(equal, &only_change_map);

  __ push(eax);
  __ push(ebx);
  __ push(esi);

  __ mov(edi, FieldOperand(edi, FixedArray::kLengthOffset));

  // Allocate new FixedDoubleArray.
  // edx: receiver
  // edi: length of source FixedArray (smi-tagged)
  AllocationFlags flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT);
  __ Allocate(FixedDoubleArray::kHeaderSize, times_8, edi,
              REGISTER_VALUE_IS_SMI, eax, ebx, no_reg, &gc_required, flags);

  // eax: destination FixedDoubleArray
  // edi: number of elements
  // edx: receiver
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_double_array_map()));
  __ mov(FieldOperand(eax, FixedDoubleArray::kLengthOffset), edi);
  __ mov(esi, FieldOperand(edx, JSObject::kElementsOffset));
  // Replace receiver's backing store with newly created FixedDoubleArray.
  __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
  __ mov(ebx, eax);
  __ RecordWriteField(edx,
                      JSObject::kElementsOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  __ mov(edi, FieldOperand(esi, FixedArray::kLengthOffset));

  // Prepare for conversion loop.
  ExternalReference canonical_the_hole_nan_reference =
      ExternalReference::address_of_the_hole_nan();
  XMMRegister the_hole_nan = xmm1;
  __ movsd(the_hole_nan,
           Operand::StaticVariable(canonical_the_hole_nan_reference));
  __ jmp(&entry);

  // Call into runtime if GC is required.
  __ bind(&gc_required);

  // Restore registers before jumping into runtime.
  __ pop(esi);
  __ pop(ebx);
  __ pop(eax);
  __ jmp(fail);

  // Convert and copy elements.
  // esi: source FixedArray
  __ bind(&loop);
  __ mov(ebx, FieldOperand(esi, edi, times_2, FixedArray::kHeaderSize));
  // ebx: current element from source
  // edi: index of current element
  __ JumpIfNotSmi(ebx, &convert_hole);

  // Normal smi, convert it to double and store.
  __ SmiUntag(ebx);
  __ Cvtsi2sd(xmm0, ebx);
  __ movsd(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize),
           xmm0);
  __ jmp(&entry);

  // Found hole, store hole_nan_as_double instead.
  __ bind(&convert_hole);

  if (FLAG_debug_code) {
    __ cmp(ebx, masm->isolate()->factory()->the_hole_value());
    __ Assert(equal, kObjectFoundInSmiOnlyArray);
  }

  __ movsd(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize),
           the_hole_nan);

  __ bind(&entry);
  __ sub(edi, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &loop);

  // Restore registers.
  __ pop(esi);
  __ pop(ebx);
  __ pop(eax);

  __ bind(&only_change_map);
  // eax: value
  // ebx: target map
  // Set transitioned map.
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx,
                      HeapObject::kMapOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
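
// Indexing note for the loop above: edi holds a smi index, which on ia32 is
// the value shifted left by one bit. Scaling a smi by times_2 therefore
// indexes 4-byte FixedArray slots, and times_4 indexes 8-byte
// FixedDoubleArray slots, without untagging first.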


void ElementsTransitionGenerator::GenerateDoubleToObject(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Return address is on the stack.
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));
  DCHECK(value.is(eax));
  DCHECK(target_map.is(ebx));

  Label loop, entry, convert_hole, gc_required, only_change_map, success;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
  __ j(equal, &only_change_map);

  __ push(esi);
  __ push(eax);
  __ push(edx);
  __ push(ebx);

  __ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));

  // Allocate new FixedArray.
  // ebx: length of source FixedDoubleArray (smi-tagged)
  __ lea(edi, Operand(ebx, times_2, FixedArray::kHeaderSize));
  __ Allocate(edi, eax, esi, no_reg, &gc_required, NO_ALLOCATION_FLAGS);

  // eax: destination FixedArray
  // ebx: number of elements
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(eax, FixedArray::kLengthOffset), ebx);
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));

  // Allocating heap numbers in the loop below can fail and cause a jump to
  // gc_required. We can't leave a partly initialized FixedArray behind,
  // so pessimistically fill it with holes now.
  Label initialization_loop, initialization_loop_entry;
  __ jmp(&initialization_loop_entry, Label::kNear);
  __ bind(&initialization_loop);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize),
         masm->isolate()->factory()->the_hole_value());
  __ bind(&initialization_loop_entry);
  __ sub(ebx, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &initialization_loop);

  __ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));
  __ jmp(&entry);

  // ebx: target map
  // edx: receiver
  // Set transitioned map.
  __ bind(&only_change_map);
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx,
                      HeapObject::kMapOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ jmp(&success);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  __ pop(ebx);
  __ pop(edx);
  __ pop(eax);
  __ pop(esi);
  __ jmp(fail);

  // Box doubles into heap numbers.
  // edi: source FixedDoubleArray
  // eax: destination FixedArray
  __ bind(&loop);
  // ebx: index of current element (smi-tagged)
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ cmp(FieldOperand(edi, ebx, times_4, offset), Immediate(kHoleNanUpper32));
  __ j(equal, &convert_hole);

  // Non-hole double, copy value into a heap number.
  __ AllocateHeapNumber(edx, esi, no_reg, &gc_required);
  // edx: new heap number
  __ movsd(xmm0,
           FieldOperand(edi, ebx, times_4, FixedDoubleArray::kHeaderSize));
  __ movsd(FieldOperand(edx, HeapNumber::kValueOffset), xmm0);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize), edx);
  __ mov(esi, ebx);
  __ RecordWriteArray(eax,
                      edx,
                      esi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ jmp(&entry, Label::kNear);

  // Replace the-hole NaN with the-hole pointer.
  __ bind(&convert_hole);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize),
         masm->isolate()->factory()->the_hole_value());

  __ bind(&entry);
  __ sub(ebx, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &loop);

  __ pop(ebx);
  __ pop(edx);
  // ebx: target map
  // edx: receiver
  // Set transitioned map.
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx,
                      HeapObject::kMapOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // Replace receiver's backing store with newly created and filled FixedArray.
  __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
  __ RecordWriteField(edx,
                      JSObject::kElementsOffset,
                      eax,
                      edi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Restore registers.
  __ pop(eax);
  __ pop(esi);

  __ bind(&success);
}
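
// Hole detection in the loop above compares only the upper 32 bits of each
// double against kHoleNanUpper32: the hole is stored as one canonical NaN
// bit pattern, so inspecting its upper word is sufficient.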


void StringCharLoadGenerator::Generate(MacroAssembler* masm,
                                       Factory* factory,
                                       Register string,
                                       Register index,
                                       Register result,
                                       Label* call_runtime) {
  // Fetch the instance type of the receiver into result register.
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ test(result, Immediate(kIsIndirectStringMask));
  __ j(zero, &check_sequential, Label::kNear);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  __ test(result, Immediate(kSlicedNotConsMask));
  __ j(zero, &cons_string, Label::kNear);

  // Handle slices.
  Label indirect_string_loaded;
  __ mov(result, FieldOperand(string, SlicedString::kOffsetOffset));
  __ SmiUntag(result);
  __ add(index, result);
  __ mov(string, FieldOperand(string, SlicedString::kParentOffset));
  __ jmp(&indirect_string_loaded, Label::kNear);

  // Handle cons strings.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ cmp(FieldOperand(string, ConsString::kSecondOffset),
         Immediate(factory->empty_string()));
  __ j(not_equal, call_runtime);
  __ mov(string, FieldOperand(string, ConsString::kFirstOffset));

  __ bind(&indirect_string_loaded);
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // Distinguish sequential and external strings. Only these two string
  // representations can reach here (slices and flat cons strings have been
  // reduced to the underlying sequential or external string).
  Label seq_string;
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test(result, Immediate(kStringRepresentationMask));
  __ j(zero, &seq_string, Label::kNear);

  // Handle external strings.
  Label one_byte_external, done;
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ test(result, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ test_b(result, Immediate(kShortExternalStringMask));
  __ j(not_zero, call_runtime);
  // Check encoding.
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ test_b(result, Immediate(kStringEncodingMask));
  __ mov(result, FieldOperand(string, ExternalString::kResourceDataOffset));
  __ j(not_equal, &one_byte_external, Label::kNear);
  // Two-byte string.
  __ movzx_w(result, Operand(result, index, times_2, 0));
  __ jmp(&done, Label::kNear);
  __ bind(&one_byte_external);
  // One-byte string.
  __ movzx_b(result, Operand(result, index, times_1, 0));
  __ jmp(&done, Label::kNear);

  // Dispatch on the encoding: one-byte or two-byte.
  Label one_byte;
  __ bind(&seq_string);
  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
  STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
  __ test(result, Immediate(kStringEncodingMask));
  __ j(not_zero, &one_byte, Label::kNear);

  // Two-byte string.
  // Load the two-byte character code into the result register.
  __ movzx_w(result, FieldOperand(string,
                                  index,
                                  times_2,
                                  SeqTwoByteString::kHeaderSize));
  __ jmp(&done, Label::kNear);

  // One-byte string.
  // Load the byte into the result register.
  __ bind(&one_byte);
  __ movzx_b(result, FieldOperand(string,
                                  index,
                                  times_1,
                                  SeqOneByteString::kHeaderSize));
  __ bind(&done);
}
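
// Summary of the dispatch above: indirect shapes (slices and cons strings)
// are first reduced to the underlying string, then the load is specialized
// along two axes, representation (sequential vs. external) and encoding
// (one-byte vs. two-byte), with anything unusual punted to call_runtime.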

#undef __


CodeAgingHelper::CodeAgingHelper(Isolate* isolate) {
  USE(isolate);
  DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
  CodePatcher patcher(isolate, young_sequence_.start(),
                      young_sequence_.length());
  patcher.masm()->push(ebp);
  patcher.masm()->mov(ebp, esp);
  patcher.masm()->push(esi);
  patcher.masm()->push(edi);
}
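
// The "young" sequence is just the standard ia32 function prologue
// (push ebp; mov ebp, esp; push esi; push edi). Aging replaces it with a
// call to an age-specific stub (see PatchPlatformCodeAge() below), which is
// why IsOld() can simply check for kCallOpcode as the first byte.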


#ifdef DEBUG
bool CodeAgingHelper::IsOld(byte* candidate) const {
  return *candidate == kCallOpcode;
}
#endif


bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
  bool result = isolate->code_aging_helper()->IsYoung(sequence);
  DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
  return result;
}


void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age,
                               MarkingParity* parity) {
  if (IsYoungSequence(isolate, sequence)) {
    *age = kNoAgeCodeAge;
    *parity = NO_MARKING_PARITY;
  } else {
    sequence++;  // Skip the kCallOpcode byte.
    Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
                             Assembler::kCallTargetAddressOffset;
    Code* stub = GetCodeFromTargetAddress(target_address);
    GetCodeAgeAndParity(stub, age, parity);
  }
}


void Code::PatchPlatformCodeAge(Isolate* isolate,
                                byte* sequence,
                                Code::Age age,
                                MarkingParity parity) {
  uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
  if (age == kNoAgeCodeAge) {
    isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
    Assembler::FlushICache(isolate, sequence, young_length);
  } else {
    Code* stub = GetCodeAgeStub(isolate, age, parity);
    CodePatcher patcher(isolate, sequence, young_length);
    patcher.masm()->call(stub->instruction_start(), RelocInfo::NONE32);
  }
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_IA32