// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_IA32

#include "src/codegen.h"
#include "src/heap/heap.h"
#include "src/macro-assembler.h"

namespace v8 {
namespace internal {


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

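// These helpers bracket a stub's call into the runtime: they enter (and
// later leave) an internal frame and record its existence on the masm, so
// generated code that requires a frame can assert one is present.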
void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterFrame(StackFrame::INTERNAL);
  DCHECK(!masm->has_frame());
  masm->set_has_frame(true);
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveFrame(StackFrame::INTERNAL);
  DCHECK(masm->has_frame());
  masm->set_has_frame(false);
}


#define __ masm.


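// Generates a specialized version of std::exp into a fresh executable
// buffer. Calling convention: cdecl with the double argument on the stack
// and the result returned on the x87 stack (fld_d before Ret). Falls back
// to std::exp when --fast-math is off or buffer allocation fails.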
UnaryMathFunction CreateExpFunction() {
  if (!FLAG_fast_math) return &std::exp;
  size_t actual_size;
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == NULL) return &std::exp;
  ExternalReference::InitializeMathExpData();

  MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
  // esp[1 * kPointerSize]: raw double input
  // esp[0 * kPointerSize]: return address
  {
    XMMRegister input = xmm1;
    XMMRegister result = xmm2;
    __ movsd(input, Operand(esp, 1 * kPointerSize));
    __ push(eax);
    __ push(ebx);

    MathExpGenerator::EmitMathExp(&masm, input, result, xmm0, eax, ebx);

    __ pop(ebx);
    __ pop(eax);
    __ movsd(Operand(esp, 1 * kPointerSize), result);
    __ fld_d(Operand(esp, 1 * kPointerSize));
    __ Ret();
  }

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  CpuFeatures::FlushICache(buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunction>(buffer);
}


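// Generates a specialized sqrt using the SSE2 sqrtsd instruction; same
// buffer setup and calling convention as CreateExpFunction() above.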
UnaryMathFunction CreateSqrtFunction() {
  size_t actual_size;
  // Allocate buffer in executable space.
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == NULL) return &std::sqrt;
  MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
  // esp[1 * kPointerSize]: raw double input
  // esp[0 * kPointerSize]: return address
  // Move double input into registers.
  {
    __ movsd(xmm0, Operand(esp, 1 * kPointerSize));
    __ sqrtsd(xmm0, xmm0);
    __ movsd(Operand(esp, 1 * kPointerSize), xmm0);
    // Load result into floating point register as return value.
    __ fld_d(Operand(esp, 1 * kPointerSize));
    __ Ret();
  }

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  CpuFeatures::FlushICache(buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunction>(buffer);
}


// Helper functions for CreateMemMoveFunction.
#undef __
#define __ ACCESS_MASM(masm)

enum Direction { FORWARD, BACKWARD };
enum Alignment { MOVE_ALIGNED, MOVE_UNALIGNED };

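// Emits the main copy loop shared by the large-copy paths below: 64-byte
// chunks are moved through xmm0-xmm3, then a remaining 32- and 16-byte
// tail is handled; the final <= 15 bytes are left to the caller, reached
// by jumping (or falling through) to |move_last_15|.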
// Expects registers:
// esi - source, aligned if alignment == MOVE_ALIGNED
// edi - destination, always aligned
// ecx - count (copy size in bytes)
// edx - loop count (number of 64 byte chunks)
void MemMoveEmitMainLoop(MacroAssembler* masm,
                         Label* move_last_15,
                         Direction direction,
                         Alignment alignment) {
  Register src = esi;
  Register dst = edi;
  Register count = ecx;
  Register loop_count = edx;
  Label loop, move_last_31, move_last_63;
  __ cmp(loop_count, 0);
  __ j(equal, &move_last_63);
  __ bind(&loop);
  // Main loop. Copy in 64 byte chunks.
  if (direction == BACKWARD) __ sub(src, Immediate(0x40));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
  __ movdq(alignment == MOVE_ALIGNED, xmm2, Operand(src, 0x20));
  __ movdq(alignment == MOVE_ALIGNED, xmm3, Operand(src, 0x30));
  if (direction == FORWARD) __ add(src, Immediate(0x40));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x40));
  __ movdqa(Operand(dst, 0x00), xmm0);
  __ movdqa(Operand(dst, 0x10), xmm1);
  __ movdqa(Operand(dst, 0x20), xmm2);
  __ movdqa(Operand(dst, 0x30), xmm3);
  if (direction == FORWARD) __ add(dst, Immediate(0x40));
  __ dec(loop_count);
  __ j(not_zero, &loop);
  // At most 63 bytes left to copy.
  __ bind(&move_last_63);
  __ test(count, Immediate(0x20));
  __ j(zero, &move_last_31);
  if (direction == BACKWARD) __ sub(src, Immediate(0x20));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
  if (direction == FORWARD) __ add(src, Immediate(0x20));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x20));
  __ movdqa(Operand(dst, 0x00), xmm0);
  __ movdqa(Operand(dst, 0x10), xmm1);
  if (direction == FORWARD) __ add(dst, Immediate(0x20));
  // At most 31 bytes left to copy.
  __ bind(&move_last_31);
  __ test(count, Immediate(0x10));
  __ j(zero, move_last_15);
  if (direction == BACKWARD) __ sub(src, Immediate(0x10));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0));
  if (direction == FORWARD) __ add(src, Immediate(0x10));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x10));
  __ movdqa(Operand(dst, 0), xmm0);
  if (direction == FORWARD) __ add(dst, Immediate(0x10));
}


void MemMoveEmitPopAndReturn(MacroAssembler* masm) {
  __ pop(esi);
  __ pop(edi);
  __ ret(0);
}


#undef __
#define __ masm.


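// Converts assembler labels into absolute addresses inside the fixed code
// buffer, so that jump tables over them can be emitted with dd() below.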
class LabelConverter {
 public:
  explicit LabelConverter(byte* buffer) : buffer_(buffer) {}
  int32_t address(Label* l) const {
    return reinterpret_cast<int32_t>(buffer_) + l->pos();
  }
 private:
  byte* buffer_;
};


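// Generates a memmove replacement. Dispatch is on copy size: at most
// kSmallCopySize bytes go to dedicated small handlers, at most
// kMediumCopySize bytes go through a jump table of medium handlers, and
// anything larger runs the SIMD main loop, forwards or backwards depending
// on whether dst lies below or above src, with extra care when the two
// regions overlap closely.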
MemMoveFunction CreateMemMoveFunction() {
  size_t actual_size;
  // Allocate buffer in executable space.
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == NULL) return NULL;
  MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
  LabelConverter conv(buffer);

  // Generated code is put into a fixed, unmovable buffer, and not into
  // the V8 heap. We can't, and don't, refer to any relocatable addresses
  // (e.g. the JavaScript NaN object).

  // 32-bit C declaration function calls pass arguments on stack.

  // Stack layout:
  // esp[12]: Third argument, size.
  // esp[8]: Second argument, source pointer.
  // esp[4]: First argument, destination pointer.
  // esp[0]: return address

  const int kDestinationOffset = 1 * kPointerSize;
  const int kSourceOffset = 2 * kPointerSize;
  const int kSizeOffset = 3 * kPointerSize;

  // When copying up to this many bytes, use special "small" handlers.
  const size_t kSmallCopySize = 8;
  // When copying up to this many bytes, use special "medium" handlers.
  const size_t kMediumCopySize = 63;
  // When the non-overlapping region of src and dst is less than this,
  // use a more careful implementation (slightly slower).
  const size_t kMinMoveDistance = 16;
  // Note that these values are dictated by the implementation below,
  // do not just change them and hope things will work!

  int stack_offset = 0;  // Update if we change the stack height.

  Label backward, backward_much_overlap;
  Label forward_much_overlap, small_size, medium_size, pop_and_return;
  __ push(edi);
  __ push(esi);
  stack_offset += 2 * kPointerSize;
  Register dst = edi;
  Register src = esi;
  Register count = ecx;
  Register loop_count = edx;
  __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
  __ mov(src, Operand(esp, stack_offset + kSourceOffset));
  __ mov(count, Operand(esp, stack_offset + kSizeOffset));

  __ cmp(dst, src);
  __ j(equal, &pop_and_return);

  __ prefetch(Operand(src, 0), 1);
  __ cmp(count, kSmallCopySize);
  __ j(below_equal, &small_size);
  __ cmp(count, kMediumCopySize);
  __ j(below_equal, &medium_size);
  __ cmp(dst, src);
  __ j(above, &backward);

  {
    // |dst| is a lower address than |src|. Copy front-to-back.
    Label unaligned_source, move_last_15, skip_last_move;
    __ mov(eax, src);
    __ sub(eax, dst);
    __ cmp(eax, kMinMoveDistance);
    __ j(below, &forward_much_overlap);
    // Copy first 16 bytes.
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    // Determine distance to alignment: 16 - (dst & 0xF).
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ neg(edx);
    __ add(edx, Immediate(16));
    __ add(dst, edx);
    __ add(src, edx);
    __ sub(count, edx);
    // dst is now aligned. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    // Check if src is also aligned.
    __ test(src, Immediate(0xF));
    __ j(not_zero, &unaligned_source);
    // Copy loop for aligned source and destination.
    MemMoveEmitMainLoop(&masm, &move_last_15, FORWARD, MOVE_ALIGNED);
    // At most 15 bytes to copy. Copy 16 bytes at the end of the region.
    __ bind(&move_last_15);
    __ and_(count, 0xF);
    __ j(zero, &skip_last_move, Label::kNear);
    __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
    __ bind(&skip_last_move);
    MemMoveEmitPopAndReturn(&masm);

    // Copy loop for unaligned source and aligned destination.
    __ bind(&unaligned_source);
    MemMoveEmitMainLoop(&masm, &move_last_15, FORWARD, MOVE_UNALIGNED);
    __ jmp(&move_last_15);

    // Less than kMinMoveDistance offset between dst and src.
    Label loop_until_aligned, last_15_much_overlap;
    __ bind(&loop_until_aligned);
    __ mov_b(eax, Operand(src, 0));
    __ inc(src);
    __ mov_b(Operand(dst, 0), eax);
    __ inc(dst);
    __ dec(count);
    __ bind(&forward_much_overlap);  // Entry point into this block.
    __ test(dst, Immediate(0xF));
    __ j(not_zero, &loop_until_aligned);
    // dst is now aligned, src can't be. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    MemMoveEmitMainLoop(&masm, &last_15_much_overlap,
                        FORWARD, MOVE_UNALIGNED);
    __ bind(&last_15_much_overlap);
    __ and_(count, 0xF);
    __ j(zero, &pop_and_return);
    __ cmp(count, kSmallCopySize);
    __ j(below_equal, &small_size);
    __ jmp(&medium_size);
  }

  {
    // |dst| is a higher address than |src|. Copy backwards.
    Label unaligned_source, move_first_15, skip_last_move;
    __ bind(&backward);
    // |dst| and |src| always point to the end of what's left to copy.
    __ add(dst, count);
    __ add(src, count);
    __ mov(eax, dst);
    __ sub(eax, src);
    __ cmp(eax, kMinMoveDistance);
    __ j(below, &backward_much_overlap);
    // Copy last 16 bytes.
    __ movdqu(xmm0, Operand(src, -0x10));
    __ movdqu(Operand(dst, -0x10), xmm0);
    // Find distance to alignment: dst & 0xF.
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ sub(dst, edx);
    __ sub(src, edx);
    __ sub(count, edx);
    // dst is now aligned. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    // Check if src is also aligned.
    __ test(src, Immediate(0xF));
    __ j(not_zero, &unaligned_source);
    // Copy loop for aligned source and destination.
    MemMoveEmitMainLoop(&masm, &move_first_15, BACKWARD, MOVE_ALIGNED);
    // At most 15 bytes to copy. Copy 16 bytes at the beginning of the region.
    __ bind(&move_first_15);
    __ and_(count, 0xF);
    __ j(zero, &skip_last_move, Label::kNear);
    __ sub(src, count);
    __ sub(dst, count);
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    __ bind(&skip_last_move);
    MemMoveEmitPopAndReturn(&masm);

    // Copy loop for unaligned source and aligned destination.
    __ bind(&unaligned_source);
    MemMoveEmitMainLoop(&masm, &move_first_15, BACKWARD, MOVE_UNALIGNED);
    __ jmp(&move_first_15);

    // Less than kMinMoveDistance offset between dst and src.
    Label loop_until_aligned, first_15_much_overlap;
    __ bind(&loop_until_aligned);
    __ dec(src);
    __ dec(dst);
    __ mov_b(eax, Operand(src, 0));
    __ mov_b(Operand(dst, 0), eax);
    __ dec(count);
    __ bind(&backward_much_overlap);  // Entry point into this block.
    __ test(dst, Immediate(0xF));
    __ j(not_zero, &loop_until_aligned);
    // dst is now aligned, src can't be. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    MemMoveEmitMainLoop(&masm, &first_15_much_overlap,
                        BACKWARD, MOVE_UNALIGNED);
    __ bind(&first_15_much_overlap);
    __ and_(count, 0xF);
    __ j(zero, &pop_and_return);
    // Small/medium handlers expect dst/src to point to the beginning.
    __ sub(dst, count);
    __ sub(src, count);
    __ cmp(count, kSmallCopySize);
    __ j(below_equal, &small_size);
    __ jmp(&medium_size);
  }
384 {
385 // Special handlers for 9 <= copy_size < 64. No assumptions about
386 // alignment or move distance, so all reads must be unaligned and
387 // must happen before any writes.
388 Label medium_handlers, f9_16, f17_32, f33_48, f49_63;
389
390 __ bind(&f9_16);
391 __ movsd(xmm0, Operand(src, 0));
392 __ movsd(xmm1, Operand(src, count, times_1, -8));
393 __ movsd(Operand(dst, 0), xmm0);
394 __ movsd(Operand(dst, count, times_1, -8), xmm1);
395 MemMoveEmitPopAndReturn(&masm);
396
397 __ bind(&f17_32);
398 __ movdqu(xmm0, Operand(src, 0));
399 __ movdqu(xmm1, Operand(src, count, times_1, -0x10));
400 __ movdqu(Operand(dst, 0x00), xmm0);
401 __ movdqu(Operand(dst, count, times_1, -0x10), xmm1);
402 MemMoveEmitPopAndReturn(&masm);
403
404 __ bind(&f33_48);
405 __ movdqu(xmm0, Operand(src, 0x00));
406 __ movdqu(xmm1, Operand(src, 0x10));
407 __ movdqu(xmm2, Operand(src, count, times_1, -0x10));
408 __ movdqu(Operand(dst, 0x00), xmm0);
409 __ movdqu(Operand(dst, 0x10), xmm1);
410 __ movdqu(Operand(dst, count, times_1, -0x10), xmm2);
411 MemMoveEmitPopAndReturn(&masm);
412
413 __ bind(&f49_63);
414 __ movdqu(xmm0, Operand(src, 0x00));
415 __ movdqu(xmm1, Operand(src, 0x10));
416 __ movdqu(xmm2, Operand(src, 0x20));
417 __ movdqu(xmm3, Operand(src, count, times_1, -0x10));
418 __ movdqu(Operand(dst, 0x00), xmm0);
419 __ movdqu(Operand(dst, 0x10), xmm1);
420 __ movdqu(Operand(dst, 0x20), xmm2);
421 __ movdqu(Operand(dst, count, times_1, -0x10), xmm3);
422 MemMoveEmitPopAndReturn(&masm);
423
424 __ bind(&medium_handlers);
425 __ dd(conv.address(&f9_16));
426 __ dd(conv.address(&f17_32));
427 __ dd(conv.address(&f33_48));
428 __ dd(conv.address(&f49_63));
429
430 __ bind(&medium_size); // Entry point into this block.
431 __ mov(eax, count);
432 __ dec(eax);
433 __ shr(eax, 4);
434 if (FLAG_debug_code) {
435 Label ok;
436 __ cmp(eax, 3);
437 __ j(below_equal, &ok);
438 __ int3();
439 __ bind(&ok);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100440 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000441 __ mov(eax, Operand(eax, times_4, conv.address(&medium_handlers)));
442 __ jmp(eax);
443 }
  {
    // Specialized copiers for copy_size <= 8 bytes.
    Label small_handlers, f0, f1, f2, f3, f4, f5_8;
    __ bind(&f0);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f1);
    __ mov_b(eax, Operand(src, 0));
    __ mov_b(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f2);
    __ mov_w(eax, Operand(src, 0));
    __ mov_w(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f3);
    __ mov_w(eax, Operand(src, 0));
    __ mov_b(edx, Operand(src, 2));
    __ mov_w(Operand(dst, 0), eax);
    __ mov_b(Operand(dst, 2), edx);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f4);
    __ mov(eax, Operand(src, 0));
    __ mov(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f5_8);
    __ mov(eax, Operand(src, 0));
    __ mov(edx, Operand(src, count, times_1, -4));
    __ mov(Operand(dst, 0), eax);
    __ mov(Operand(dst, count, times_1, -4), edx);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&small_handlers);
    __ dd(conv.address(&f0));
    __ dd(conv.address(&f1));
    __ dd(conv.address(&f2));
    __ dd(conv.address(&f3));
    __ dd(conv.address(&f4));
    __ dd(conv.address(&f5_8));
    __ dd(conv.address(&f5_8));
    __ dd(conv.address(&f5_8));
    __ dd(conv.address(&f5_8));

    __ bind(&small_size);  // Entry point into this block.
    if (FLAG_debug_code) {
      Label ok;
      __ cmp(count, 8);
      __ j(below_equal, &ok);
      __ int3();
      __ bind(&ok);
    }
    __ mov(eax, Operand(count, times_4, conv.address(&small_handlers)));
    __ jmp(eax);
  }

  __ bind(&pop_and_return);
  MemMoveEmitPopAndReturn(&masm);

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(!RelocInfo::RequiresRelocation(desc));
  CpuFeatures::FlushICache(buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  // TODO(jkummerow): It would be nice to register this code creation event
  // with the PROFILE / GDBJIT system.
  return FUNCTION_CAST<MemMoveFunction>(buffer);
}
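
// A minimal usage sketch (hypothetical caller; the actual wiring lives in
// the platform initialization code):
//   MemMoveFunction move = CreateMemMoveFunction();
//   if (move != NULL) move(dst_ptr, src_ptr, num_bytes);
// A NULL result means the executable buffer could not be allocated, and
// the caller must fall back to a C++ implementation.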


#undef __

// -------------------------------------------------------------------------
// Code generators

#define __ ACCESS_MASM(masm)


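// A map-change-only elements transition: store the new map into the
// receiver and emit the write barrier for it; the backing store itself is
// left untouched.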
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* allocation_memento_found) {
  Register scratch = edi;
  DCHECK(!AreAliased(receiver, key, value, target_map, scratch));

  if (mode == TRACK_ALLOCATION_SITE) {
    DCHECK(allocation_memento_found != NULL);
    __ JumpIfJSArrayHasAllocationMemento(
        receiver, scratch, allocation_memento_found);
  }

  // Set transitioned map.
  __ mov(FieldOperand(receiver, HeapObject::kMapOffset), target_map);
  __ RecordWriteField(receiver,
                      HeapObject::kMapOffset,
                      target_map,
                      scratch,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}


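// Transitions the receiver's elements from a FixedArray of smis to a
// freshly allocated FixedDoubleArray: every smi is untagged and stored as
// a double, and holes are stored as the canonical hole NaN.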
void ElementsTransitionGenerator::GenerateSmiToDouble(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Return address is on the stack.
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));
  DCHECK(value.is(eax));
  DCHECK(target_map.is(ebx));

  Label loop, entry, convert_hole, gc_required, only_change_map;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
  __ j(equal, &only_change_map);

  __ push(eax);
  __ push(ebx);

  __ mov(edi, FieldOperand(edi, FixedArray::kLengthOffset));

  // Allocate new FixedDoubleArray.
  // edx: receiver
  // edi: length of source FixedArray (smi-tagged)
  AllocationFlags flags =
      static_cast<AllocationFlags>(TAG_OBJECT | DOUBLE_ALIGNMENT);
  __ Allocate(FixedDoubleArray::kHeaderSize, times_8, edi,
              REGISTER_VALUE_IS_SMI, eax, ebx, no_reg, &gc_required, flags);

  // eax: destination FixedDoubleArray
  // edi: number of elements
  // edx: receiver
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_double_array_map()));
  __ mov(FieldOperand(eax, FixedDoubleArray::kLengthOffset), edi);
  __ mov(esi, FieldOperand(edx, JSObject::kElementsOffset));
  // Replace receiver's backing store with newly created FixedDoubleArray.
  __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
  __ mov(ebx, eax);
  __ RecordWriteField(edx,
                      JSObject::kElementsOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  __ mov(edi, FieldOperand(esi, FixedArray::kLengthOffset));

  // Prepare for conversion loop.
  ExternalReference canonical_the_hole_nan_reference =
      ExternalReference::address_of_the_hole_nan();
  XMMRegister the_hole_nan = xmm1;
  __ movsd(the_hole_nan,
           Operand::StaticVariable(canonical_the_hole_nan_reference));
  __ jmp(&entry);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  // Restore registers before jumping into runtime.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ pop(ebx);
  __ pop(eax);
  __ jmp(fail);

  // Convert and copy elements.
  // esi: source FixedArray
  __ bind(&loop);
  __ mov(ebx, FieldOperand(esi, edi, times_2, FixedArray::kHeaderSize));
  // ebx: current element from source
  // edi: index of current element
  __ JumpIfNotSmi(ebx, &convert_hole);

  // Normal smi, convert it to double and store.
  __ SmiUntag(ebx);
  __ Cvtsi2sd(xmm0, ebx);
  __ movsd(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize),
           xmm0);
  __ jmp(&entry);

  // Found hole, store hole_nan_as_double instead.
  __ bind(&convert_hole);

  if (FLAG_debug_code) {
    __ cmp(ebx, masm->isolate()->factory()->the_hole_value());
    __ Assert(equal, kObjectFoundInSmiOnlyArray);
  }

  __ movsd(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize),
           the_hole_nan);

  __ bind(&entry);
  __ sub(edi, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &loop);

  __ pop(ebx);
  __ pop(eax);

  // Restore esi.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  __ bind(&only_change_map);
  // eax: value
  // ebx: target map
  // Set transitioned map.
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx,
                      HeapObject::kMapOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}


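// The reverse transition: boxes every double from the FixedDoubleArray
// into a freshly allocated heap number in a new FixedArray, turning hole
// NaNs back into the-hole sentinels. The destination array is pre-filled
// with holes because the heap number allocations below can trigger a GC.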
void ElementsTransitionGenerator::GenerateDoubleToObject(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Return address is on the stack.
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));
  DCHECK(value.is(eax));
  DCHECK(target_map.is(ebx));

  Label loop, entry, convert_hole, gc_required, only_change_map, success;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
  __ j(equal, &only_change_map);

  __ push(eax);
  __ push(edx);
  __ push(ebx);

  __ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));

  // Allocate new FixedArray.
  // ebx: length of source FixedDoubleArray (smi-tagged)
  __ lea(edi, Operand(ebx, times_2, FixedArray::kHeaderSize));
  __ Allocate(edi, eax, esi, no_reg, &gc_required, TAG_OBJECT);

  // eax: destination FixedArray
  // ebx: number of elements
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(eax, FixedArray::kLengthOffset), ebx);
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));

  // Allocating heap numbers in the loop below can fail and cause a jump to
  // gc_required. We can't leave a partly initialized FixedArray behind,
  // so pessimistically fill it with holes now.
  Label initialization_loop, initialization_loop_entry;
  __ jmp(&initialization_loop_entry, Label::kNear);
  __ bind(&initialization_loop);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize),
         masm->isolate()->factory()->the_hole_value());
  __ bind(&initialization_loop_entry);
  __ sub(ebx, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &initialization_loop);

  __ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));
  __ jmp(&entry);

  // ebx: target map
  // edx: receiver
  // Set transitioned map.
  __ bind(&only_change_map);
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx,
                      HeapObject::kMapOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ jmp(&success);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ pop(ebx);
  __ pop(edx);
  __ pop(eax);
  __ jmp(fail);

  // Box doubles into heap numbers.
  // edi: source FixedDoubleArray
  // eax: destination FixedArray
  __ bind(&loop);
  // ebx: index of current element (smi-tagged)
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ cmp(FieldOperand(edi, ebx, times_4, offset), Immediate(kHoleNanUpper32));
  __ j(equal, &convert_hole);

  // Non-hole double, copy value into a heap number.
  __ AllocateHeapNumber(edx, esi, no_reg, &gc_required);
  // edx: new heap number
  __ movsd(xmm0,
           FieldOperand(edi, ebx, times_4, FixedDoubleArray::kHeaderSize));
  __ movsd(FieldOperand(edx, HeapNumber::kValueOffset), xmm0);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize), edx);
  __ mov(esi, ebx);
  __ RecordWriteArray(eax,
                      edx,
                      esi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ jmp(&entry, Label::kNear);

  // Replace the-hole NaN with the-hole pointer.
  __ bind(&convert_hole);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize),
         masm->isolate()->factory()->the_hole_value());

  __ bind(&entry);
  __ sub(ebx, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &loop);

  __ pop(ebx);
  __ pop(edx);
  // ebx: target map
  // edx: receiver
  // Set transitioned map.
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx,
                      HeapObject::kMapOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // Replace receiver's backing store with newly created and filled FixedArray.
  __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
  __ RecordWriteField(edx,
                      JSObject::kElementsOffset,
                      eax,
                      edi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Restore registers.
  __ pop(eax);
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  __ bind(&success);
}


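// Loads the character at |index| of |string| into |result|, dispatching on
// the string's representation (sliced, cons, external, sequential) and
// encoding (one-byte vs. two-byte). Bails out to the runtime for strings
// that first need to be flattened.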
void StringCharLoadGenerator::Generate(MacroAssembler* masm,
                                       Factory* factory,
                                       Register string,
                                       Register index,
                                       Register result,
                                       Label* call_runtime) {
  // Fetch the instance type of the receiver into result register.
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ test(result, Immediate(kIsIndirectStringMask));
  __ j(zero, &check_sequential, Label::kNear);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  __ test(result, Immediate(kSlicedNotConsMask));
  __ j(zero, &cons_string, Label::kNear);

  // Handle slices.
  Label indirect_string_loaded;
  __ mov(result, FieldOperand(string, SlicedString::kOffsetOffset));
  __ SmiUntag(result);
  __ add(index, result);
  __ mov(string, FieldOperand(string, SlicedString::kParentOffset));
  __ jmp(&indirect_string_loaded, Label::kNear);

  // Handle cons strings.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ cmp(FieldOperand(string, ConsString::kSecondOffset),
         Immediate(factory->empty_string()));
  __ j(not_equal, call_runtime);
  __ mov(string, FieldOperand(string, ConsString::kFirstOffset));

  __ bind(&indirect_string_loaded);
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // Distinguish sequential and external strings. Only these two string
  // representations can reach here (slices and flat cons strings have been
  // reduced to the underlying sequential or external string).
  Label seq_string;
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test(result, Immediate(kStringRepresentationMask));
  __ j(zero, &seq_string, Label::kNear);

  // Handle external strings.
  Label one_byte_external, done;
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ test(result, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ test_b(result, kShortExternalStringMask);
  __ j(not_zero, call_runtime);
  // Check encoding.
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ test_b(result, kStringEncodingMask);
  __ mov(result, FieldOperand(string, ExternalString::kResourceDataOffset));
  __ j(not_equal, &one_byte_external, Label::kNear);
  // Two-byte string.
  __ movzx_w(result, Operand(result, index, times_2, 0));
  __ jmp(&done, Label::kNear);
  __ bind(&one_byte_external);
  // One-byte string.
  __ movzx_b(result, Operand(result, index, times_1, 0));
  __ jmp(&done, Label::kNear);

  // Dispatch on the encoding: one-byte or two-byte.
  Label one_byte;
  __ bind(&seq_string);
  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
  STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
  __ test(result, Immediate(kStringEncodingMask));
  __ j(not_zero, &one_byte, Label::kNear);

  // Two-byte string.
  // Load the two-byte character code into the result register.
  __ movzx_w(result, FieldOperand(string,
                                  index,
                                  times_2,
                                  SeqTwoByteString::kHeaderSize));
  __ jmp(&done, Label::kNear);

  // One-byte string.
  // Load the byte into the result register.
  __ bind(&one_byte);
  __ movzx_b(result, FieldOperand(string,
                                  index,
                                  times_1,
                                  SeqOneByteString::kHeaderSize));
  __ bind(&done);
}


static Operand ExpConstant(int index) {
  return Operand::StaticVariable(ExternalReference::math_exp_constants(index));
}


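// Computes exp(|input|) into |result| entirely in generated code. The
// polynomial coefficients read through ExpConstant() and the correction
// table (math_exp_log_table, indexed by the reduced argument masked with
// 0x7ff, i.e. 2048 entries) are the values installed earlier by
// ExternalReference::InitializeMathExpData().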
void MathExpGenerator::EmitMathExp(MacroAssembler* masm,
                                   XMMRegister input,
                                   XMMRegister result,
                                   XMMRegister double_scratch,
                                   Register temp1,
                                   Register temp2) {
  DCHECK(!input.is(double_scratch));
  DCHECK(!input.is(result));
  DCHECK(!result.is(double_scratch));
  DCHECK(!temp1.is(temp2));
  DCHECK(ExternalReference::math_exp_constants(0).address() != NULL);
  DCHECK(!masm->serializer_enabled());  // External references not serializable.

  Label done;

  __ movsd(double_scratch, ExpConstant(0));
  __ xorpd(result, result);
  __ ucomisd(double_scratch, input);
  __ j(above_equal, &done);
  __ ucomisd(input, ExpConstant(1));
  __ movsd(result, ExpConstant(2));
  __ j(above_equal, &done);
  __ movsd(double_scratch, ExpConstant(3));
  __ movsd(result, ExpConstant(4));
  __ mulsd(double_scratch, input);
  __ addsd(double_scratch, result);
  __ movd(temp2, double_scratch);
  __ subsd(double_scratch, result);
  __ movsd(result, ExpConstant(6));
  __ mulsd(double_scratch, ExpConstant(5));
  __ subsd(double_scratch, input);
  __ subsd(result, double_scratch);
  __ movsd(input, double_scratch);
  __ mulsd(input, double_scratch);
  __ mulsd(result, input);
  __ mov(temp1, temp2);
  __ mulsd(result, ExpConstant(7));
  __ subsd(result, double_scratch);
  __ add(temp1, Immediate(0x1ff800));
  __ addsd(result, ExpConstant(8));
  __ and_(temp2, Immediate(0x7ff));
  __ shr(temp1, 11);
  __ shl(temp1, 20);
  __ movd(input, temp1);
  __ pshufd(input, input, static_cast<uint8_t>(0xe1));  // Order: 11 10 00 01
  __ movsd(double_scratch, Operand::StaticArray(
      temp2, times_8, ExternalReference::math_exp_log_table()));
  __ orps(input, double_scratch);
  __ mulsd(result, input);
  __ bind(&done);
}

#undef __


CodeAgingHelper::CodeAgingHelper() {
  DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
  CodePatcher patcher(young_sequence_.start(), young_sequence_.length());
  patcher.masm()->push(ebp);
  patcher.masm()->mov(ebp, esp);
  patcher.masm()->push(esi);
  patcher.masm()->push(edi);
}


#ifdef DEBUG
bool CodeAgingHelper::IsOld(byte* candidate) const {
  return *candidate == kCallOpcode;
}
#endif


bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
  bool result = isolate->code_aging_helper()->IsYoung(sequence);
  DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
  return result;
}


void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age,
                               MarkingParity* parity) {
  if (IsYoungSequence(isolate, sequence)) {
    *age = kNoAgeCodeAge;
    *parity = NO_MARKING_PARITY;
  } else {
    sequence++;  // Skip the kCallOpcode byte.
    Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
                             Assembler::kCallTargetAddressOffset;
    Code* stub = GetCodeFromTargetAddress(target_address);
    GetCodeAgeAndParity(stub, age, parity);
  }
}


void Code::PatchPlatformCodeAge(Isolate* isolate,
                                byte* sequence,
                                Code::Age age,
                                MarkingParity parity) {
  uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
  if (age == kNoAgeCodeAge) {
    isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
    CpuFeatures::FlushICache(sequence, young_length);
  } else {
    Code* stub = GetCodeAgeStub(isolate, age, parity);
    CodePatcher patcher(sequence, young_length);
    patcher.masm()->call(stub->instruction_start(), RelocInfo::NONE32);
  }
}

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32