// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_IA32

#include "src/codegen.h"
#include "src/heap/heap.h"
#include "src/macro-assembler.h"

namespace v8 {
namespace internal {


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterFrame(StackFrame::INTERNAL);
  DCHECK(!masm->has_frame());
  masm->set_has_frame(true);
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveFrame(StackFrame::INTERNAL);
  DCHECK(masm->has_frame());
  masm->set_has_frame(false);
}


#define __ masm.
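// Note: '__' expands to 'masm.', so each '__ op(...)' line below invokes a
// MacroAssembler method that emits the corresponding IA32 instruction into
// the code buffer being assembled.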
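
// Generates a specialized native exp() implementation, used when the
// --fast-math flag is on; falls back to std::exp when the executable
// buffer cannot be allocated.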
UnaryMathFunction CreateExpFunction() {
  if (!FLAG_fast_math) return &std::exp;
  size_t actual_size;
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == NULL) return &std::exp;
  ExternalReference::InitializeMathExpData();

  MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
  // esp[1 * kPointerSize]: raw double input
  // esp[0 * kPointerSize]: return address
  {
    XMMRegister input = xmm1;
    XMMRegister result = xmm2;
    __ movsd(input, Operand(esp, 1 * kPointerSize));
    __ push(eax);
    __ push(ebx);

    MathExpGenerator::EmitMathExp(&masm, input, result, xmm0, eax, ebx);

    __ pop(ebx);
    __ pop(eax);
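    // Write the result back to the stack slot and reload it onto the x87
    // stack: the IA32 C calling convention returns doubles in st(0).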
    __ movsd(Operand(esp, 1 * kPointerSize), result);
    __ fld_d(Operand(esp, 1 * kPointerSize));
    __ Ret();
  }

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  CpuFeatures::FlushICache(buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunction>(buffer);
}
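

// Generates a native sqrt() implementation built on the SSE2 sqrtsd
// instruction; falls back to std::sqrt when the executable buffer cannot
// be allocated.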
UnaryMathFunction CreateSqrtFunction() {
  size_t actual_size;
  // Allocate buffer in executable space.
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == NULL) return &std::sqrt;
  MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
  // esp[1 * kPointerSize]: raw double input
  // esp[0 * kPointerSize]: return address
  // Move double input into registers.
  {
    __ movsd(xmm0, Operand(esp, 1 * kPointerSize));
    __ sqrtsd(xmm0, xmm0);
    __ movsd(Operand(esp, 1 * kPointerSize), xmm0);
    // Load result into floating point register as return value.
    __ fld_d(Operand(esp, 1 * kPointerSize));
    __ Ret();
  }

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  CpuFeatures::FlushICache(buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunction>(buffer);
}


// Helper functions for CreateMemMoveFunction.
#undef __
#define __ ACCESS_MASM(masm)

enum Direction { FORWARD, BACKWARD };
enum Alignment { MOVE_ALIGNED, MOVE_UNALIGNED };
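
// FORWARD walks src/dst from low to high addresses and BACKWARD from high
// to low; the backward direction is what an overlapping move with dst > src
// requires.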
// Expects registers:
// esi - source, aligned if alignment == MOVE_ALIGNED
// edi - destination, always aligned
// ecx - count (copy size in bytes)
// edx - loop count (number of 64 byte chunks)
void MemMoveEmitMainLoop(MacroAssembler* masm,
                         Label* move_last_15,
                         Direction direction,
                         Alignment alignment) {
  Register src = esi;
  Register dst = edi;
  Register count = ecx;
  Register loop_count = edx;
  Label loop, move_last_31, move_last_63;
  __ cmp(loop_count, 0);
  __ j(equal, &move_last_63);
  __ bind(&loop);
  // Main loop. Copy in 64 byte chunks.
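  // movdq(aligned, ...) emits movdqa when 'aligned' is true and movdqu
  // otherwise, so one loop body serves both alignment variants.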
  if (direction == BACKWARD) __ sub(src, Immediate(0x40));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
  __ movdq(alignment == MOVE_ALIGNED, xmm2, Operand(src, 0x20));
  __ movdq(alignment == MOVE_ALIGNED, xmm3, Operand(src, 0x30));
  if (direction == FORWARD) __ add(src, Immediate(0x40));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x40));
  __ movdqa(Operand(dst, 0x00), xmm0);
  __ movdqa(Operand(dst, 0x10), xmm1);
  __ movdqa(Operand(dst, 0x20), xmm2);
  __ movdqa(Operand(dst, 0x30), xmm3);
  if (direction == FORWARD) __ add(dst, Immediate(0x40));
  __ dec(loop_count);
  __ j(not_zero, &loop);
  // At most 63 bytes left to copy.
  __ bind(&move_last_63);
  __ test(count, Immediate(0x20));
  __ j(zero, &move_last_31);
  if (direction == BACKWARD) __ sub(src, Immediate(0x20));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
  if (direction == FORWARD) __ add(src, Immediate(0x20));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x20));
  __ movdqa(Operand(dst, 0x00), xmm0);
  __ movdqa(Operand(dst, 0x10), xmm1);
  if (direction == FORWARD) __ add(dst, Immediate(0x20));
  // At most 31 bytes left to copy.
  __ bind(&move_last_31);
  __ test(count, Immediate(0x10));
  __ j(zero, move_last_15);
  if (direction == BACKWARD) __ sub(src, Immediate(0x10));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0));
  if (direction == FORWARD) __ add(src, Immediate(0x10));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x10));
  __ movdqa(Operand(dst, 0), xmm0);
  if (direction == FORWARD) __ add(dst, Immediate(0x10));
}


void MemMoveEmitPopAndReturn(MacroAssembler* masm) {
  __ pop(esi);
  __ pop(edi);
  __ ret(0);
}


#undef __
#define __ masm.
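
// Converts a Label bound in the fixed code buffer into its absolute
// address, so label addresses can be written into the jump tables built
// with 'dd' below.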
class LabelConverter {
 public:
  explicit LabelConverter(byte* buffer) : buffer_(buffer) {}
  int32_t address(Label* l) const {
    return reinterpret_cast<int32_t>(buffer_) + l->pos();
  }
 private:
  byte* buffer_;
};
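

// Generates an SSE2-based memmove: copies of at most 8 bytes dispatch to
// per-size "small" handlers, copies of at most 63 bytes to "medium"
// handlers, and larger copies run a 64-byte-per-iteration main loop,
// copying forward or backward as the overlap requires, with more careful
// variants when src and dst are less than 16 bytes apart.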
MemMoveFunction CreateMemMoveFunction() {
  size_t actual_size;
  // Allocate buffer in executable space.
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == NULL) return NULL;
  MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
  LabelConverter conv(buffer);

  // Generated code is put into a fixed, unmovable buffer, and not into
  // the V8 heap. We can't, and don't, refer to any relocatable addresses
  // (e.g. the JavaScript NaN object).

  // 32-bit C declaration function calls pass arguments on the stack.

  // Stack layout:
  // esp[12]: Third argument, size.
  // esp[8]: Second argument, source pointer.
  // esp[4]: First argument, destination pointer.
  // esp[0]: return address

  const int kDestinationOffset = 1 * kPointerSize;
  const int kSourceOffset = 2 * kPointerSize;
  const int kSizeOffset = 3 * kPointerSize;

  // When copying up to this many bytes, use special "small" handlers.
  const size_t kSmallCopySize = 8;
  // When copying up to this many bytes, use special "medium" handlers.
  const size_t kMediumCopySize = 63;
  // When the non-overlapping region of src and dst is smaller than this,
  // use a more careful implementation (slightly slower).
  const size_t kMinMoveDistance = 16;
  // Note that these values are dictated by the implementation below;
  // do not just change them and hope things will work!

  int stack_offset = 0;  // Update if we change the stack height.

  Label backward, backward_much_overlap;
  Label forward_much_overlap, small_size, medium_size, pop_and_return;
  __ push(edi);
  __ push(esi);
  stack_offset += 2 * kPointerSize;
  Register dst = edi;
  Register src = esi;
  Register count = ecx;
  Register loop_count = edx;
  __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
  __ mov(src, Operand(esp, stack_offset + kSourceOffset));
  __ mov(count, Operand(esp, stack_offset + kSizeOffset));

  __ cmp(dst, src);
  __ j(equal, &pop_and_return);

  __ prefetch(Operand(src, 0), 1);
  __ cmp(count, kSmallCopySize);
  __ j(below_equal, &small_size);
  __ cmp(count, kMediumCopySize);
  __ j(below_equal, &medium_size);
  __ cmp(dst, src);
  __ j(above, &backward);

  {
    // |dst| is a lower address than |src|. Copy front-to-back.
    Label unaligned_source, move_last_15, skip_last_move;
    __ mov(eax, src);
    __ sub(eax, dst);
    __ cmp(eax, kMinMoveDistance);
    __ j(below, &forward_much_overlap);
    // Copy first 16 bytes.
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    // Determine distance to alignment: 16 - (dst & 0xF).
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ neg(edx);
    __ add(edx, Immediate(16));
    __ add(dst, edx);
    __ add(src, edx);
    __ sub(count, edx);
    // dst is now aligned. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    // Check if src is also aligned.
    __ test(src, Immediate(0xF));
    __ j(not_zero, &unaligned_source);
    // Copy loop for aligned source and destination.
    MemMoveEmitMainLoop(&masm, &move_last_15, FORWARD, MOVE_ALIGNED);
    // At most 15 bytes left; copy the last 16 bytes of the region.
    __ bind(&move_last_15);
    __ and_(count, 0xF);
    __ j(zero, &skip_last_move, Label::kNear);
    __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
    __ bind(&skip_last_move);
    MemMoveEmitPopAndReturn(&masm);

    // Copy loop for unaligned source and aligned destination.
    __ bind(&unaligned_source);
    MemMoveEmitMainLoop(&masm, &move_last_15, FORWARD, MOVE_UNALIGNED);
    __ jmp(&move_last_15);

    // Less than kMinMoveDistance offset between dst and src.
    Label loop_until_aligned, last_15_much_overlap;
    __ bind(&loop_until_aligned);
    __ mov_b(eax, Operand(src, 0));
    __ inc(src);
    __ mov_b(Operand(dst, 0), eax);
    __ inc(dst);
    __ dec(count);
    __ bind(&forward_much_overlap);  // Entry point into this block.
    __ test(dst, Immediate(0xF));
    __ j(not_zero, &loop_until_aligned);
    // dst is now aligned, src can't be. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    MemMoveEmitMainLoop(&masm, &last_15_much_overlap,
                        FORWARD, MOVE_UNALIGNED);
    __ bind(&last_15_much_overlap);
    __ and_(count, 0xF);
    __ j(zero, &pop_and_return);
    __ cmp(count, kSmallCopySize);
    __ j(below_equal, &small_size);
    __ jmp(&medium_size);
  }

  {
    // |dst| is a higher address than |src|. Copy backwards.
    Label unaligned_source, move_first_15, skip_last_move;
    __ bind(&backward);
    // |dst| and |src| always point to the end of what's left to copy.
    __ add(dst, count);
    __ add(src, count);
    __ mov(eax, dst);
    __ sub(eax, src);
    __ cmp(eax, kMinMoveDistance);
    __ j(below, &backward_much_overlap);
    // Copy last 16 bytes.
    __ movdqu(xmm0, Operand(src, -0x10));
    __ movdqu(Operand(dst, -0x10), xmm0);
    // Determine distance to alignment: dst & 0xF.
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ sub(dst, edx);
    __ sub(src, edx);
    __ sub(count, edx);
    // dst is now aligned. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    // Check if src is also aligned.
    __ test(src, Immediate(0xF));
    __ j(not_zero, &unaligned_source);
    // Copy loop for aligned source and destination.
    MemMoveEmitMainLoop(&masm, &move_first_15, BACKWARD, MOVE_ALIGNED);
    // At most 15 bytes left; copy the first 16 bytes of the region.
    __ bind(&move_first_15);
    __ and_(count, 0xF);
    __ j(zero, &skip_last_move, Label::kNear);
    __ sub(src, count);
    __ sub(dst, count);
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    __ bind(&skip_last_move);
    MemMoveEmitPopAndReturn(&masm);

    // Copy loop for unaligned source and aligned destination.
    __ bind(&unaligned_source);
    MemMoveEmitMainLoop(&masm, &move_first_15, BACKWARD, MOVE_UNALIGNED);
    __ jmp(&move_first_15);

    // Less than kMinMoveDistance offset between dst and src.
    Label loop_until_aligned, first_15_much_overlap;
    __ bind(&loop_until_aligned);
    __ dec(src);
    __ dec(dst);
    __ mov_b(eax, Operand(src, 0));
    __ mov_b(Operand(dst, 0), eax);
    __ dec(count);
    __ bind(&backward_much_overlap);  // Entry point into this block.
    __ test(dst, Immediate(0xF));
    __ j(not_zero, &loop_until_aligned);
    // dst is now aligned, src can't be. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    MemMoveEmitMainLoop(&masm, &first_15_much_overlap,
                        BACKWARD, MOVE_UNALIGNED);
    __ bind(&first_15_much_overlap);
    __ and_(count, 0xF);
    __ j(zero, &pop_and_return);
    // Small/medium handlers expect dst/src to point to the beginning.
    __ sub(dst, count);
    __ sub(src, count);
    __ cmp(count, kSmallCopySize);
    __ j(below_equal, &small_size);
    __ jmp(&medium_size);
  }
  {
    // Special handlers for 9 <= copy_size < 64. No assumptions about
    // alignment or move distance, so all reads must be unaligned and
    // must happen before any writes.
    Label medium_handlers, f9_16, f17_32, f33_48, f49_63;

    __ bind(&f9_16);
    __ movsd(xmm0, Operand(src, 0));
    __ movsd(xmm1, Operand(src, count, times_1, -8));
    __ movsd(Operand(dst, 0), xmm0);
    __ movsd(Operand(dst, count, times_1, -8), xmm1);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f17_32);
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(xmm1, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm1);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f33_48);
    __ movdqu(xmm0, Operand(src, 0x00));
    __ movdqu(xmm1, Operand(src, 0x10));
    __ movdqu(xmm2, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, 0x10), xmm1);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm2);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f49_63);
    __ movdqu(xmm0, Operand(src, 0x00));
    __ movdqu(xmm1, Operand(src, 0x10));
    __ movdqu(xmm2, Operand(src, 0x20));
    __ movdqu(xmm3, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, 0x10), xmm1);
    __ movdqu(Operand(dst, 0x20), xmm2);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm3);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&medium_handlers);
    __ dd(conv.address(&f9_16));
    __ dd(conv.address(&f17_32));
    __ dd(conv.address(&f33_48));
    __ dd(conv.address(&f49_63));
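
    // Dispatch on (count - 1) >> 4, which maps 9..16 -> f9_16,
    // 17..32 -> f17_32, 33..48 -> f33_48, and 49..63 -> f49_63.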
    __ bind(&medium_size);  // Entry point into this block.
    __ mov(eax, count);
    __ dec(eax);
    __ shr(eax, 4);
    if (FLAG_debug_code) {
      Label ok;
      __ cmp(eax, 3);
      __ j(below_equal, &ok);
      __ int3();
      __ bind(&ok);
    }
    __ mov(eax, Operand(eax, times_4, conv.address(&medium_handlers)));
    __ jmp(eax);
  }
  {
    // Specialized copiers for copy_size <= 8 bytes.
    Label small_handlers, f0, f1, f2, f3, f4, f5_8;
    __ bind(&f0);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f1);
    __ mov_b(eax, Operand(src, 0));
    __ mov_b(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f2);
    __ mov_w(eax, Operand(src, 0));
    __ mov_w(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f3);
    __ mov_w(eax, Operand(src, 0));
    __ mov_b(edx, Operand(src, 2));
    __ mov_w(Operand(dst, 0), eax);
    __ mov_b(Operand(dst, 2), edx);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f4);
    __ mov(eax, Operand(src, 0));
    __ mov(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f5_8);
    __ mov(eax, Operand(src, 0));
    __ mov(edx, Operand(src, count, times_1, -4));
    __ mov(Operand(dst, 0), eax);
    __ mov(Operand(dst, count, times_1, -4), edx);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&small_handlers);
    __ dd(conv.address(&f0));
    __ dd(conv.address(&f1));
    __ dd(conv.address(&f2));
    __ dd(conv.address(&f3));
    __ dd(conv.address(&f4));
    __ dd(conv.address(&f5_8));
    __ dd(conv.address(&f5_8));
    __ dd(conv.address(&f5_8));
    __ dd(conv.address(&f5_8));
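
    // The table is indexed directly by count (0..8); sizes 5 through 8
    // share f5_8, which copies two possibly overlapping 4-byte words.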
    __ bind(&small_size);  // Entry point into this block.
    if (FLAG_debug_code) {
      Label ok;
      __ cmp(count, 8);
      __ j(below_equal, &ok);
      __ int3();
      __ bind(&ok);
    }
    __ mov(eax, Operand(count, times_4, conv.address(&small_handlers)));
    __ jmp(eax);
  }

  __ bind(&pop_and_return);
  MemMoveEmitPopAndReturn(&masm);

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(!RelocInfo::RequiresRelocation(desc));
  CpuFeatures::FlushICache(buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  // TODO(jkummerow): It would be nice to register this code creation event
  // with the PROFILE / GDBJIT system.
  return FUNCTION_CAST<MemMoveFunction>(buffer);
}


#undef __

// -------------------------------------------------------------------------
// Code generators

#define __ ACCESS_MASM(masm)
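
// Handles an elements-kind transition where only the map changes (e.g.
// FAST_SMI_ELEMENTS to FAST_ELEMENTS): the backing store layout is
// unchanged, so only the receiver's map word is rewritten, with a write
// barrier for the incremental marker.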
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* allocation_memento_found) {
  Register scratch = edi;
  DCHECK(!AreAliased(receiver, key, value, target_map, scratch));

  if (mode == TRACK_ALLOCATION_SITE) {
    DCHECK(allocation_memento_found != NULL);
    __ JumpIfJSArrayHasAllocationMemento(
        receiver, scratch, allocation_memento_found);
  }

  // Set transitioned map.
  __ mov(FieldOperand(receiver, HeapObject::kMapOffset), target_map);
  __ RecordWriteField(receiver,
                      HeapObject::kMapOffset,
                      target_map,
                      scratch,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
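

// Transitions a FixedArray of smis into a FixedDoubleArray: allocates the
// new backing store, untags each smi and stores it as a double, and writes
// the canonical hole NaN for holes. Jumps to 'fail' if the required
// allocation needs a GC.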
void ElementsTransitionGenerator::GenerateSmiToDouble(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Return address is on the stack.
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));
  DCHECK(value.is(eax));
  DCHECK(target_map.is(ebx));

  Label loop, entry, convert_hole, gc_required, only_change_map;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
  __ j(equal, &only_change_map);

  __ push(eax);
  __ push(ebx);

  __ mov(edi, FieldOperand(edi, FixedArray::kLengthOffset));

  // Allocate new FixedDoubleArray.
  // edx: receiver
  // edi: length of source FixedArray (smi-tagged)
  AllocationFlags flags =
      static_cast<AllocationFlags>(TAG_OBJECT | DOUBLE_ALIGNMENT);
  __ Allocate(FixedDoubleArray::kHeaderSize, times_8, edi,
              REGISTER_VALUE_IS_SMI, eax, ebx, no_reg, &gc_required, flags);

  // eax: destination FixedDoubleArray
  // edi: number of elements
  // edx: receiver
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_double_array_map()));
  __ mov(FieldOperand(eax, FixedDoubleArray::kLengthOffset), edi);
  __ mov(esi, FieldOperand(edx, JSObject::kElementsOffset));
  // Replace receiver's backing store with newly created FixedDoubleArray.
  __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
  __ mov(ebx, eax);
  __ RecordWriteField(edx,
                      JSObject::kElementsOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  __ mov(edi, FieldOperand(esi, FixedArray::kLengthOffset));

  // Prepare for conversion loop.
  ExternalReference canonical_the_hole_nan_reference =
      ExternalReference::address_of_the_hole_nan();
  XMMRegister the_hole_nan = xmm1;
  __ movsd(the_hole_nan,
           Operand::StaticVariable(canonical_the_hole_nan_reference));
  __ jmp(&entry);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  // Restore registers before jumping into runtime.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ pop(ebx);
  __ pop(eax);
  __ jmp(fail);

  // Convert and copy elements.
  // esi: source FixedArray
  __ bind(&loop);
  __ mov(ebx, FieldOperand(esi, edi, times_2, FixedArray::kHeaderSize));
  // ebx: current element from source
  // edi: index of current element
  __ JumpIfNotSmi(ebx, &convert_hole);

  // Normal smi, convert it to double and store.
  __ SmiUntag(ebx);
  __ Cvtsi2sd(xmm0, ebx);
  __ movsd(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize),
           xmm0);
  __ jmp(&entry);

  // Found a hole; store the canonical hole NaN instead.
  __ bind(&convert_hole);

  if (FLAG_debug_code) {
    __ cmp(ebx, masm->isolate()->factory()->the_hole_value());
    __ Assert(equal, kObjectFoundInSmiOnlyArray);
  }

  __ movsd(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize),
           the_hole_nan);

  __ bind(&entry);
  __ sub(edi, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &loop);

  __ pop(ebx);
  __ pop(eax);

  // Restore esi.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  __ bind(&only_change_map);
  // eax: value
  // ebx: target map
  // Set transitioned map.
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx,
                      HeapObject::kMapOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
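

// The reverse transition: boxes each double of a FixedDoubleArray into a
// fresh HeapNumber in a new FixedArray, replacing hole NaNs with the
// the_hole sentinel, then installs the new map and backing store.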
void ElementsTransitionGenerator::GenerateDoubleToObject(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Return address is on the stack.
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));
  DCHECK(value.is(eax));
  DCHECK(target_map.is(ebx));

  Label loop, entry, convert_hole, gc_required, only_change_map, success;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
  __ j(equal, &only_change_map);

  __ push(eax);
  __ push(edx);
  __ push(ebx);

  __ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));

  // Allocate new FixedArray.
  // ebx: length of source FixedDoubleArray (smi-tagged)
  __ lea(edi, Operand(ebx, times_2, FixedArray::kHeaderSize));
  __ Allocate(edi, eax, esi, no_reg, &gc_required, TAG_OBJECT);

  // eax: destination FixedArray
  // ebx: number of elements
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(eax, FixedArray::kLengthOffset), ebx);
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));

  __ jmp(&entry);

  // ebx: target map
  // edx: receiver
  // Set transitioned map.
  __ bind(&only_change_map);
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx,
                      HeapObject::kMapOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ jmp(&success);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ pop(ebx);
  __ pop(edx);
  __ pop(eax);
  __ jmp(fail);

  // Box doubles into heap numbers.
  // edi: source FixedDoubleArray
  // eax: destination FixedArray
  __ bind(&loop);
  // ebx: index of current element (smi-tagged)
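  // The hole is a NaN with a fixed bit pattern, so comparing only the upper
  // 32 bits of the element against kHoleNanUpper32 identifies it.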
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ cmp(FieldOperand(edi, ebx, times_4, offset), Immediate(kHoleNanUpper32));
  __ j(equal, &convert_hole);

  // Non-hole double, copy value into a heap number.
  __ AllocateHeapNumber(edx, esi, no_reg, &gc_required);
  // edx: new heap number
  __ movsd(xmm0,
           FieldOperand(edi, ebx, times_4, FixedDoubleArray::kHeaderSize));
  __ movsd(FieldOperand(edx, HeapNumber::kValueOffset), xmm0);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize), edx);
  __ mov(esi, ebx);
  __ RecordWriteArray(eax,
                      edx,
                      esi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ jmp(&entry, Label::kNear);

  // Replace the-hole NaN with the-hole pointer.
  __ bind(&convert_hole);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize),
         masm->isolate()->factory()->the_hole_value());

  __ bind(&entry);
  __ sub(ebx, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &loop);

  __ pop(ebx);
  __ pop(edx);
  // ebx: target map
  // edx: receiver
  // Set transitioned map.
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx,
                      HeapObject::kMapOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // Replace receiver's backing store with newly created and filled FixedArray.
  __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
  __ RecordWriteField(edx,
                      JSObject::kElementsOffset,
                      eax,
                      edi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Restore registers.
  __ pop(eax);
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  __ bind(&success);
}
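

// Loads the character at 'index' of 'string' into 'result': slices and
// flat cons strings are first unwrapped to their underlying string, then
// the load dispatches on sequential vs. external representation and on
// one- vs. two-byte encoding. Jumps to 'call_runtime' for the cases the
// generated code cannot handle (short external strings, unflattened cons
// strings).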
void StringCharLoadGenerator::Generate(MacroAssembler* masm,
                                       Factory* factory,
                                       Register string,
                                       Register index,
                                       Register result,
                                       Label* call_runtime) {
  // Fetch the instance type of the receiver into result register.
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ test(result, Immediate(kIsIndirectStringMask));
  __ j(zero, &check_sequential, Label::kNear);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  __ test(result, Immediate(kSlicedNotConsMask));
  __ j(zero, &cons_string, Label::kNear);

  // Handle slices.
  Label indirect_string_loaded;
  __ mov(result, FieldOperand(string, SlicedString::kOffsetOffset));
  __ SmiUntag(result);
  __ add(index, result);
  __ mov(string, FieldOperand(string, SlicedString::kParentOffset));
  __ jmp(&indirect_string_loaded, Label::kNear);

  // Handle cons strings.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ cmp(FieldOperand(string, ConsString::kSecondOffset),
         Immediate(factory->empty_string()));
  __ j(not_equal, call_runtime);
  __ mov(string, FieldOperand(string, ConsString::kFirstOffset));

  __ bind(&indirect_string_loaded);
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // Distinguish sequential and external strings. Only these two string
  // representations can reach here (slices and flat cons strings have been
  // reduced to the underlying sequential or external string).
  Label seq_string;
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test(result, Immediate(kStringRepresentationMask));
  __ j(zero, &seq_string, Label::kNear);

  // Handle external strings.
  Label one_byte_external, done;
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ test(result, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ test_b(result, kShortExternalStringMask);
  __ j(not_zero, call_runtime);
  // Check encoding.
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ test_b(result, kStringEncodingMask);
  __ mov(result, FieldOperand(string, ExternalString::kResourceDataOffset));
  __ j(not_equal, &one_byte_external, Label::kNear);
  // Two-byte string.
  __ movzx_w(result, Operand(result, index, times_2, 0));
  __ jmp(&done, Label::kNear);
  __ bind(&one_byte_external);
  // One-byte string.
  __ movzx_b(result, Operand(result, index, times_1, 0));
  __ jmp(&done, Label::kNear);

  // Dispatch on the encoding: one-byte or two-byte.
  Label one_byte;
  __ bind(&seq_string);
  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
  STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
  __ test(result, Immediate(kStringEncodingMask));
  __ j(not_zero, &one_byte, Label::kNear);

  // Two-byte string.
  // Load the two-byte character code into the result register.
  __ movzx_w(result, FieldOperand(string,
                                  index,
                                  times_2,
                                  SeqTwoByteString::kHeaderSize));
  __ jmp(&done, Label::kNear);

  // One-byte string.
  // Load the byte into the result register.
  __ bind(&one_byte);
  __ movzx_b(result, FieldOperand(string,
                                  index,
                                  times_1,
                                  SeqOneByteString::kHeaderSize));
  __ bind(&done);
}


static Operand ExpConstant(int index) {
  return Operand::StaticVariable(ExternalReference::math_exp_constants(index));
}
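

// Computes exp(x) into 'result' using the constants in math_exp_constants
// and the math_exp_log_table. The method appears to split x into an integer
// multiple of a fixed fraction of log(2) plus a small remainder, handle the
// remainder with a short polynomial, and reconstruct the power-of-two part
// by assembling exponent bits directly (the shr/shl on temp1) and looking
// up the fractional part in the table. Out-of-range inputs short-circuit
// through the first two compares.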
void MathExpGenerator::EmitMathExp(MacroAssembler* masm,
                                   XMMRegister input,
                                   XMMRegister result,
                                   XMMRegister double_scratch,
                                   Register temp1,
                                   Register temp2) {
  DCHECK(!input.is(double_scratch));
  DCHECK(!input.is(result));
  DCHECK(!result.is(double_scratch));
  DCHECK(!temp1.is(temp2));
  DCHECK(ExternalReference::math_exp_constants(0).address() != NULL);
  DCHECK(!masm->serializer_enabled());  // External references not serializable.

  Label done;

  __ movsd(double_scratch, ExpConstant(0));
  __ xorpd(result, result);
  __ ucomisd(double_scratch, input);
  __ j(above_equal, &done);
  __ ucomisd(input, ExpConstant(1));
  __ movsd(result, ExpConstant(2));
  __ j(above_equal, &done);
  __ movsd(double_scratch, ExpConstant(3));
  __ movsd(result, ExpConstant(4));
  __ mulsd(double_scratch, input);
  __ addsd(double_scratch, result);
  __ movd(temp2, double_scratch);
  __ subsd(double_scratch, result);
  __ movsd(result, ExpConstant(6));
  __ mulsd(double_scratch, ExpConstant(5));
  __ subsd(double_scratch, input);
  __ subsd(result, double_scratch);
  __ movsd(input, double_scratch);
  __ mulsd(input, double_scratch);
  __ mulsd(result, input);
  __ mov(temp1, temp2);
  __ mulsd(result, ExpConstant(7));
  __ subsd(result, double_scratch);
  __ add(temp1, Immediate(0x1ff800));
  __ addsd(result, ExpConstant(8));
  __ and_(temp2, Immediate(0x7ff));
  __ shr(temp1, 11);
  __ shl(temp1, 20);
  __ movd(input, temp1);
  __ pshufd(input, input, static_cast<uint8_t>(0xe1));  // Order: 11 10 00 01
  __ movsd(double_scratch, Operand::StaticArray(
      temp2, times_8, ExternalReference::math_exp_log_table()));
  __ orps(input, double_scratch);
  __ mulsd(result, input);
  __ bind(&done);
}

#undef __
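

// Code aging: freshly compiled functions start with the standard prologue
// below (the "young" sequence). To age code, the prologue is overwritten
// with a call to an age-specific stub, so the call target also encodes the
// code's age.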
CodeAgingHelper::CodeAgingHelper() {
  DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
  CodePatcher patcher(young_sequence_.start(), young_sequence_.length());
  patcher.masm()->push(ebp);
  patcher.masm()->mov(ebp, esp);
  patcher.masm()->push(esi);
  patcher.masm()->push(edi);
}


#ifdef DEBUG
bool CodeAgingHelper::IsOld(byte* candidate) const {
  return *candidate == kCallOpcode;
}
#endif


bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
  bool result = isolate->code_aging_helper()->IsYoung(sequence);
  DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
  return result;
}


void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age,
                               MarkingParity* parity) {
  if (IsYoungSequence(isolate, sequence)) {
    *age = kNoAgeCodeAge;
    *parity = NO_MARKING_PARITY;
  } else {
    sequence++;  // Skip the kCallOpcode byte
    Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
                             Assembler::kCallTargetAddressOffset;
    Code* stub = GetCodeFromTargetAddress(target_address);
    GetCodeAgeAndParity(stub, age, parity);
  }
}


void Code::PatchPlatformCodeAge(Isolate* isolate,
                                byte* sequence,
                                Code::Age age,
                                MarkingParity parity) {
  uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
  if (age == kNoAgeCodeAge) {
    isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
    CpuFeatures::FlushICache(sequence, young_length);
  } else {
    Code* stub = GetCodeAgeStub(isolate, age, parity);
    CodePatcher patcher(sequence, young_length);
    patcher.masm()->call(stub->instruction_start(), RelocInfo::NONE32);
  }
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32