blob: 2f94f356650d2628c1fece365582a90b6883dbe2 [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Steve Blocka7e24c12009-10-30 11:49:00 +00004
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005#include "src/ia32/codegen-ia32.h"
Steve Blocka7e24c12009-10-30 11:49:00 +00006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#if V8_TARGET_ARCH_IA32
Leon Clarkef7060e22010-06-03 12:02:55 +01008
Ben Murdochb8a8cc12014-11-26 15:28:44 +00009#include "src/codegen.h"
10#include "src/heap/heap.h"
11#include "src/macro-assembler.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000012
13namespace v8 {
14namespace internal {
15
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010016
17// -------------------------------------------------------------------------
18// Platform-specific RuntimeCallHelper functions.
19
// Called before generated code makes a runtime/stub call without an existing
// frame: sets up an INTERNAL frame and records that fact on the assembler.
void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterFrame(StackFrame::INTERNAL);
  // EnterFrame does not touch the has_frame flag itself; it must still be
  // unset here. Set it so the callee may rely on a frame being present.
  DCHECK(!masm->has_frame());
  masm->set_has_frame(true);
}
25
26
// Counterpart of BeforeCall: tears down the INTERNAL frame and clears the
// assembler's has_frame flag.
void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveFrame(StackFrame::INTERNAL);
  DCHECK(masm->has_frame());
  masm->set_has_frame(false);
}
32
33
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010034#define __ masm.
35
Ben Murdoch3ef787d2012-04-12 10:51:47 +010036
// Builds a free-standing exp() stub in a freshly allocated executable buffer
// outside the V8 heap. The double argument is read from the stack (ia32 C
// calling convention) and the result is returned on the x87 FP stack, as
// required for double returns on ia32. Returns nullptr if the executable
// buffer cannot be allocated.
UnaryMathFunctionWithIsolate CreateExpFunction(Isolate* isolate) {
  size_t actual_size;
  // Allocate buffer in executable space.
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;
  // Make sure the constant tables EmitMathExp reads are initialized.
  ExternalReference::InitializeMathExpData();

  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);
  // esp[1 * kPointerSize]: raw double input
  // esp[0 * kPointerSize]: return address
  {
    XMMRegister input = xmm1;
    XMMRegister result = xmm2;
    __ movsd(input, Operand(esp, 1 * kPointerSize));
    // eax and ebx are handed to EmitMathExp as scratch registers below, so
    // preserve the caller's values around the call.
    __ push(eax);
    __ push(ebx);

    MathExpGenerator::EmitMathExp(&masm, input, result, xmm0, eax, ebx);

    __ pop(ebx);
    __ pop(eax);
    // Spill the SSE result to the argument slot and reload it onto the x87
    // stack, where ia32 double return values live.
    __ movsd(Operand(esp, 1 * kPointerSize), result);
    __ fld_d(Operand(esp, 1 * kPointerSize));
    __ Ret();
  }

  CodeDesc desc;
  masm.GetCode(&desc);
  // The stub lives outside the heap and is never moved, so it must not
  // contain anything that would need relocation.
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
}
72
73
// Builds a free-standing sqrt() stub in a freshly allocated executable
// buffer outside the V8 heap. The double argument is read from the stack and
// the result is returned on the x87 FP stack (ia32 C double return).
// Returns nullptr if the executable buffer cannot be allocated.
UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
  size_t actual_size;
  // Allocate buffer in executable space.
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;
  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);
  // esp[1 * kPointerSize]: raw double input
  // esp[0 * kPointerSize]: return address
  // Move double input into registers.
  {
    __ movsd(xmm0, Operand(esp, 1 * kPointerSize));
    __ sqrtsd(xmm0, xmm0);
    // Spill the SSE result back to the argument slot ...
    __ movsd(Operand(esp, 1 * kPointerSize), xmm0);
    // ... and load result into floating point register as return value.
    __ fld_d(Operand(esp, 1 * kPointerSize));
    __ Ret();
  }

  CodeDesc desc;
  masm.GetCode(&desc);
  // The stub lives outside the heap and is never moved, so it must not
  // contain anything that would need relocation.
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
}
102
103
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000104// Helper functions for CreateMemMoveFunction.
105#undef __
106#define __ ACCESS_MASM(masm)
107
// Copy direction of the generated memmove loop: front-to-back or
// back-to-front (the latter is needed when the regions overlap with
// dst above src).
enum Direction { FORWARD, BACKWARD };
// Whether the source pointer is known to be 16-byte aligned; selects
// aligned vs. unaligned SSE loads in MemMoveEmitMainLoop (the destination
// is always aligned by the caller).
enum Alignment { MOVE_ALIGNED, MOVE_UNALIGNED };
110
// Emits the main SSE copy loop for CreateMemMoveFunction. Copies in
// 64-byte chunks, then finishes a possible 32-byte and 16-byte tail, and
// finally jumps to |move_last_15| (bound by the caller) with at most 15
// bytes left to copy.
//
// Expects registers:
//   esi - source, aligned if alignment == MOVE_ALIGNED
//   edi - destination, always aligned
//   ecx - count (copy size in bytes)
//   edx - loop count (number of 64 byte chunks)
void MemMoveEmitMainLoop(MacroAssembler* masm,
                         Label* move_last_15,
                         Direction direction,
                         Alignment alignment) {
  Register src = esi;
  Register dst = edi;
  Register count = ecx;
  Register loop_count = edx;
  Label loop, move_last_31, move_last_63;
  __ cmp(loop_count, 0);
  __ j(equal, &move_last_63);
  __ bind(&loop);
  // Main loop. Copy in 64 byte chunks. When copying backwards the pointers
  // are pre-decremented; when copying forwards they are post-incremented, so
  // in both cases src/dst address the chunk currently being moved.
  if (direction == BACKWARD) __ sub(src, Immediate(0x40));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
  __ movdq(alignment == MOVE_ALIGNED, xmm2, Operand(src, 0x20));
  __ movdq(alignment == MOVE_ALIGNED, xmm3, Operand(src, 0x30));
  if (direction == FORWARD) __ add(src, Immediate(0x40));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x40));
  __ movdqa(Operand(dst, 0x00), xmm0);
  __ movdqa(Operand(dst, 0x10), xmm1);
  __ movdqa(Operand(dst, 0x20), xmm2);
  __ movdqa(Operand(dst, 0x30), xmm3);
  if (direction == FORWARD) __ add(dst, Immediate(0x40));
  __ dec(loop_count);
  __ j(not_zero, &loop);
  // At most 63 bytes left to copy.
  __ bind(&move_last_63);
  __ test(count, Immediate(0x20));
  __ j(zero, &move_last_31);
  if (direction == BACKWARD) __ sub(src, Immediate(0x20));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
  if (direction == FORWARD) __ add(src, Immediate(0x20));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x20));
  __ movdqa(Operand(dst, 0x00), xmm0);
  __ movdqa(Operand(dst, 0x10), xmm1);
  if (direction == FORWARD) __ add(dst, Immediate(0x20));
  // At most 31 bytes left to copy.
  __ bind(&move_last_31);
  __ test(count, Immediate(0x10));
  __ j(zero, move_last_15);
  if (direction == BACKWARD) __ sub(src, Immediate(0x10));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0));
  if (direction == FORWARD) __ add(src, Immediate(0x10));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x10));
  __ movdqa(Operand(dst, 0), xmm0);
  if (direction == FORWARD) __ add(dst, Immediate(0x10));
}
166
167
// Emits the common epilogue of the generated memmove: restores esi/edi
// (pushed in CreateMemMoveFunction's prologue, in the opposite order) and
// returns to the C caller.
void MemMoveEmitPopAndReturn(MacroAssembler* masm) {
  __ pop(esi);
  __ pop(edi);
  __ ret(0);
}
173
174
175#undef __
176#define __ masm.
177
178
179class LabelConverter {
180 public:
181 explicit LabelConverter(byte* buffer) : buffer_(buffer) {}
182 int32_t address(Label* l) const {
183 return reinterpret_cast<int32_t>(buffer_) + l->pos();
184 }
185 private:
186 byte* buffer_;
187};
188
189
// Generates a free-standing memmove() replacement in an executable buffer
// outside the V8 heap. The generated code follows the ia32 C calling
// convention (dst, src, size on the stack), handles overlapping regions in
// both directions, and dispatches to specialized small (<= 8 bytes) and
// medium (<= 63 bytes) copiers via jump tables. Returns nullptr if the
// executable buffer cannot be allocated.
MemMoveFunction CreateMemMoveFunction(Isolate* isolate) {
  size_t actual_size;
  // Allocate buffer in executable space.
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;
  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);
  LabelConverter conv(buffer);

  // Generated code is put into a fixed, unmovable buffer, and not into
  // the V8 heap. We can't, and don't, refer to any relocatable addresses
  // (e.g. the JavaScript nan-object).

  // 32-bit C declaration function calls pass arguments on stack.

  // Stack layout:
  // esp[12]: Third argument, size.
  // esp[8]: Second argument, source pointer.
  // esp[4]: First argument, destination pointer.
  // esp[0]: return address

  const int kDestinationOffset = 1 * kPointerSize;
  const int kSourceOffset = 2 * kPointerSize;
  const int kSizeOffset = 3 * kPointerSize;

  // When copying up to this many bytes, use special "small" handlers.
  const size_t kSmallCopySize = 8;
  // When copying up to this many bytes, use special "medium" handlers.
  const size_t kMediumCopySize = 63;
  // When non-overlapping region of src and dst is less than this,
  // use a more careful implementation (slightly slower).
  const size_t kMinMoveDistance = 16;
  // Note that these values are dictated by the implementation below,
  // do not just change them and hope things will work!

  int stack_offset = 0;  // Update if we change the stack height.

  Label backward, backward_much_overlap;
  Label forward_much_overlap, small_size, medium_size, pop_and_return;
  // Prologue: save esi/edi, which the generated code uses as src/dst but
  // which are callee-saved in the C calling convention.
  __ push(edi);
  __ push(esi);
  stack_offset += 2 * kPointerSize;
  Register dst = edi;
  Register src = esi;
  Register count = ecx;
  Register loop_count = edx;
  __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
  __ mov(src, Operand(esp, stack_offset + kSourceOffset));
  __ mov(count, Operand(esp, stack_offset + kSizeOffset));

  // dst == src is a no-op; bail out early.
  __ cmp(dst, src);
  __ j(equal, &pop_and_return);

  __ prefetch(Operand(src, 0), 1);
  __ cmp(count, kSmallCopySize);
  __ j(below_equal, &small_size);
  __ cmp(count, kMediumCopySize);
  __ j(below_equal, &medium_size);
  __ cmp(dst, src);
  __ j(above, &backward);

  {
    // |dst| is a lower address than |src|. Copy front-to-back.
    Label unaligned_source, move_last_15, skip_last_move;
    __ mov(eax, src);
    __ sub(eax, dst);
    __ cmp(eax, kMinMoveDistance);
    __ j(below, &forward_much_overlap);
    // Copy first 16 bytes.
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    // Determine distance to alignment: 16 - (dst & 0xF).
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ neg(edx);
    __ add(edx, Immediate(16));
    __ add(dst, edx);
    __ add(src, edx);
    __ sub(count, edx);
    // dst is now aligned. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    // Check if src is also aligned.
    __ test(src, Immediate(0xF));
    __ j(not_zero, &unaligned_source);
    // Copy loop for aligned source and destination.
    MemMoveEmitMainLoop(&masm, &move_last_15, FORWARD, MOVE_ALIGNED);
    // At most 15 bytes to copy. Copy 16 bytes at end of string.
    __ bind(&move_last_15);
    __ and_(count, 0xF);
    __ j(zero, &skip_last_move, Label::kNear);
    __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
    __ bind(&skip_last_move);
    MemMoveEmitPopAndReturn(&masm);

    // Copy loop for unaligned source and aligned destination.
    __ bind(&unaligned_source);
    MemMoveEmitMainLoop(&masm, &move_last_15, FORWARD, MOVE_UNALIGNED);
    __ jmp(&move_last_15);

    // Less than kMinMoveDistance offset between dst and src.
    // Copy byte-by-byte until dst is aligned, then fall into the main loop
    // (reads and writes must not be reordered past each other here).
    Label loop_until_aligned, last_15_much_overlap;
    __ bind(&loop_until_aligned);
    __ mov_b(eax, Operand(src, 0));
    __ inc(src);
    __ mov_b(Operand(dst, 0), eax);
    __ inc(dst);
    __ dec(count);
    __ bind(&forward_much_overlap);  // Entry point into this block.
    __ test(dst, Immediate(0xF));
    __ j(not_zero, &loop_until_aligned);
    // dst is now aligned, src can't be. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    MemMoveEmitMainLoop(&masm, &last_15_much_overlap,
                        FORWARD, MOVE_UNALIGNED);
    __ bind(&last_15_much_overlap);
    __ and_(count, 0xF);
    __ j(zero, &pop_and_return);
    __ cmp(count, kSmallCopySize);
    __ j(below_equal, &small_size);
    __ jmp(&medium_size);
  }

  {
    // |dst| is a higher address than |src|. Copy backwards.
    Label unaligned_source, move_first_15, skip_last_move;
    __ bind(&backward);
    // |dst| and |src| always point to the end of what's left to copy.
    __ add(dst, count);
    __ add(src, count);
    __ mov(eax, dst);
    __ sub(eax, src);
    __ cmp(eax, kMinMoveDistance);
    __ j(below, &backward_much_overlap);
    // Copy last 16 bytes.
    __ movdqu(xmm0, Operand(src, -0x10));
    __ movdqu(Operand(dst, -0x10), xmm0);
    // Find distance to alignment: dst & 0xF
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ sub(dst, edx);
    __ sub(src, edx);
    __ sub(count, edx);
    // dst is now aligned. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    // Check if src is also aligned.
    __ test(src, Immediate(0xF));
    __ j(not_zero, &unaligned_source);
    // Copy loop for aligned source and destination.
    MemMoveEmitMainLoop(&masm, &move_first_15, BACKWARD, MOVE_ALIGNED);
    // At most 15 bytes to copy. Copy 16 bytes at beginning of string.
    __ bind(&move_first_15);
    __ and_(count, 0xF);
    __ j(zero, &skip_last_move, Label::kNear);
    __ sub(src, count);
    __ sub(dst, count);
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    __ bind(&skip_last_move);
    MemMoveEmitPopAndReturn(&masm);

    // Copy loop for unaligned source and aligned destination.
    __ bind(&unaligned_source);
    MemMoveEmitMainLoop(&masm, &move_first_15, BACKWARD, MOVE_UNALIGNED);
    __ jmp(&move_first_15);

    // Less than kMinMoveDistance offset between dst and src.
    // Copy byte-by-byte (backwards) until dst is aligned, then fall into
    // the main loop.
    Label loop_until_aligned, first_15_much_overlap;
    __ bind(&loop_until_aligned);
    __ dec(src);
    __ dec(dst);
    __ mov_b(eax, Operand(src, 0));
    __ mov_b(Operand(dst, 0), eax);
    __ dec(count);
    __ bind(&backward_much_overlap);  // Entry point into this block.
    __ test(dst, Immediate(0xF));
    __ j(not_zero, &loop_until_aligned);
    // dst is now aligned, src can't be. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    MemMoveEmitMainLoop(&masm, &first_15_much_overlap,
                        BACKWARD, MOVE_UNALIGNED);
    __ bind(&first_15_much_overlap);
    __ and_(count, 0xF);
    __ j(zero, &pop_and_return);
    // Small/medium handlers expect dst/src to point to the beginning.
    __ sub(dst, count);
    __ sub(src, count);
    __ cmp(count, kSmallCopySize);
    __ j(below_equal, &small_size);
    __ jmp(&medium_size);
  }
  {
    // Special handlers for 9 <= copy_size < 64. No assumptions about
    // alignment or move distance, so all reads must be unaligned and
    // must happen before any writes.
    Label medium_handlers, f9_16, f17_32, f33_48, f49_63;

    __ bind(&f9_16);
    __ movsd(xmm0, Operand(src, 0));
    __ movsd(xmm1, Operand(src, count, times_1, -8));
    __ movsd(Operand(dst, 0), xmm0);
    __ movsd(Operand(dst, count, times_1, -8), xmm1);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f17_32);
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(xmm1, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm1);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f33_48);
    __ movdqu(xmm0, Operand(src, 0x00));
    __ movdqu(xmm1, Operand(src, 0x10));
    __ movdqu(xmm2, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, 0x10), xmm1);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm2);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f49_63);
    __ movdqu(xmm0, Operand(src, 0x00));
    __ movdqu(xmm1, Operand(src, 0x10));
    __ movdqu(xmm2, Operand(src, 0x20));
    __ movdqu(xmm3, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, 0x10), xmm1);
    __ movdqu(Operand(dst, 0x20), xmm2);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm3);
    MemMoveEmitPopAndReturn(&masm);

    // Jump table: handler index is (count - 1) >> 4, i.e. one entry per
    // 16-byte size class.
    __ bind(&medium_handlers);
    __ dd(conv.address(&f9_16));
    __ dd(conv.address(&f17_32));
    __ dd(conv.address(&f33_48));
    __ dd(conv.address(&f49_63));

    __ bind(&medium_size);  // Entry point into this block.
    __ mov(eax, count);
    __ dec(eax);
    __ shr(eax, 4);
    if (FLAG_debug_code) {
      Label ok;
      __ cmp(eax, 3);
      __ j(below_equal, &ok);
      __ int3();
      __ bind(&ok);
    }
    __ mov(eax, Operand(eax, times_4, conv.address(&medium_handlers)));
    __ jmp(eax);
  }
  {
    // Specialized copiers for copy_size <= 8 bytes.
    Label small_handlers, f0, f1, f2, f3, f4, f5_8;
    __ bind(&f0);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f1);
    __ mov_b(eax, Operand(src, 0));
    __ mov_b(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f2);
    __ mov_w(eax, Operand(src, 0));
    __ mov_w(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f3);
    __ mov_w(eax, Operand(src, 0));
    __ mov_b(edx, Operand(src, 2));
    __ mov_w(Operand(dst, 0), eax);
    __ mov_b(Operand(dst, 2), edx);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f4);
    __ mov(eax, Operand(src, 0));
    __ mov(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f5_8);
    // Two possibly-overlapping 4-byte moves cover any size from 5 to 8.
    __ mov(eax, Operand(src, 0));
    __ mov(edx, Operand(src, count, times_1, -4));
    __ mov(Operand(dst, 0), eax);
    __ mov(Operand(dst, count, times_1, -4), edx);
    MemMoveEmitPopAndReturn(&masm);

    // Jump table indexed directly by count (0..8); sizes 5-8 share f5_8.
    __ bind(&small_handlers);
    __ dd(conv.address(&f0));
    __ dd(conv.address(&f1));
    __ dd(conv.address(&f2));
    __ dd(conv.address(&f3));
    __ dd(conv.address(&f4));
    __ dd(conv.address(&f5_8));
    __ dd(conv.address(&f5_8));
    __ dd(conv.address(&f5_8));
    __ dd(conv.address(&f5_8));

    __ bind(&small_size);  // Entry point into this block.
    if (FLAG_debug_code) {
      Label ok;
      __ cmp(count, 8);
      __ j(below_equal, &ok);
      __ int3();
      __ bind(&ok);
    }
    __ mov(eax, Operand(count, times_4, conv.address(&small_handlers)));
    __ jmp(eax);
  }

  __ bind(&pop_and_return);
  MemMoveEmitPopAndReturn(&masm);

  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(!RelocInfo::RequiresRelocation(desc));
  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  // TODO(jkummerow): It would be nice to register this code creation event
  // with the PROFILE / GDBJIT system.
  return FUNCTION_CAST<MemMoveFunction>(buffer);
}
516
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000517
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100518#undef __
519
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100520// -------------------------------------------------------------------------
521// Code generators
522
523#define __ ACCESS_MASM(masm)
524
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000525
// Emits code for an elements-kind transition that only requires swapping the
// receiver's map (no change to the backing store). If allocation-site
// tracking is on, bails out to |allocation_memento_found| when the receiver
// is a JSArray with an allocation memento. Clobbers edi as scratch.
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* allocation_memento_found) {
  Register scratch = edi;
  DCHECK(!AreAliased(receiver, key, value, target_map, scratch));

  if (mode == TRACK_ALLOCATION_SITE) {
    DCHECK(allocation_memento_found != NULL);
    __ JumpIfJSArrayHasAllocationMemento(
        receiver, scratch, allocation_memento_found);
  }

  // Set transitioned map, with a write barrier for the map field since
  // target_map is a heap object.
  __ mov(FieldOperand(receiver, HeapObject::kMapOffset), target_map);
  __ RecordWriteField(receiver,
                      HeapObject::kMapOffset,
                      target_map,
                      scratch,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
553
554
// Emits code that transitions a JSObject's elements from FAST_SMI_ELEMENTS
// to FAST_DOUBLE_ELEMENTS: allocates a FixedDoubleArray, converts every smi
// element to a double (holes become the canonical hole NaN), installs it as
// the backing store, and swaps the map. Jumps to |fail| if GC is required or
// (with allocation-site tracking) an allocation memento is found.
// Register assignment is fixed by the calling convention (see DCHECKs).
void ElementsTransitionGenerator::GenerateSmiToDouble(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Return address is on the stack.
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));
  DCHECK(value.is(eax));
  DCHECK(target_map.is(ebx));

  Label loop, entry, convert_hole, gc_required, only_change_map;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
  __ j(equal, &only_change_map);

  // Preserve value and target_map across the allocation below.
  __ push(eax);
  __ push(ebx);

  __ mov(edi, FieldOperand(edi, FixedArray::kLengthOffset));

  // Allocate new FixedDoubleArray.
  // edx: receiver
  // edi: length of source FixedArray (smi-tagged)
  AllocationFlags flags =
      static_cast<AllocationFlags>(TAG_OBJECT | DOUBLE_ALIGNMENT);
  __ Allocate(FixedDoubleArray::kHeaderSize, times_8, edi,
              REGISTER_VALUE_IS_SMI, eax, ebx, no_reg, &gc_required, flags);

  // eax: destination FixedDoubleArray
  // edi: number of elements
  // edx: receiver
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_double_array_map()));
  __ mov(FieldOperand(eax, FixedDoubleArray::kLengthOffset), edi);
  __ mov(esi, FieldOperand(edx, JSObject::kElementsOffset));
  // Replace receiver's backing store with newly created FixedDoubleArray.
  __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
  __ mov(ebx, eax);
  __ RecordWriteField(edx,
                      JSObject::kElementsOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  __ mov(edi, FieldOperand(esi, FixedArray::kLengthOffset));

  // Prepare for conversion loop: keep the canonical hole NaN bit pattern in
  // an XMM register so holes can be stored without another memory load.
  ExternalReference canonical_the_hole_nan_reference =
      ExternalReference::address_of_the_hole_nan();
  XMMRegister the_hole_nan = xmm1;
  __ movsd(the_hole_nan,
           Operand::StaticVariable(canonical_the_hole_nan_reference));
  __ jmp(&entry);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  // Restore registers before jumping into runtime.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ pop(ebx);
  __ pop(eax);
  __ jmp(fail);

  // Convert and copy elements
  // esi: source FixedArray
  __ bind(&loop);
  __ mov(ebx, FieldOperand(esi, edi, times_2, FixedArray::kHeaderSize));
  // ebx: current element from source
  // edi: index of current element
  __ JumpIfNotSmi(ebx, &convert_hole);

  // Normal smi, convert it to double and store.
  __ SmiUntag(ebx);
  __ Cvtsi2sd(xmm0, ebx);
  __ movsd(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize),
           xmm0);
  __ jmp(&entry);

  // Found hole, store hole_nan_as_double instead.
  __ bind(&convert_hole);

  if (FLAG_debug_code) {
    // A non-smi in a smi-only array must be the hole.
    __ cmp(ebx, masm->isolate()->factory()->the_hole_value());
    __ Assert(equal, kObjectFoundInSmiOnlyArray);
  }

  __ movsd(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize),
           the_hole_nan);

  // Loop entry: iterate edi (smi-tagged index) down to zero.
  __ bind(&entry);
  __ sub(edi, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &loop);

  __ pop(ebx);
  __ pop(eax);

  // Restore esi.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  __ bind(&only_change_map);
  // eax: value
  // ebx: target map
  // Set transitioned map. The remembered set can be omitted because the map
  // store is to a freshly transitioned object (no old-to-new pointer here).
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx,
                      HeapObject::kMapOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
679
680
// Emits code that transitions a JSObject's elements from
// FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS: allocates a FixedArray, boxes every
// double into a new HeapNumber (hole NaNs become the hole sentinel),
// installs the new backing store, and swaps the map. Jumps to |fail| if GC
// is required or (with allocation-site tracking) an allocation memento is
// found. Register assignment is fixed by the calling convention (see
// DCHECKs).
void ElementsTransitionGenerator::GenerateDoubleToObject(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Return address is on the stack.
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));
  DCHECK(value.is(eax));
  DCHECK(target_map.is(ebx));

  Label loop, entry, convert_hole, gc_required, only_change_map, success;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
  __ j(equal, &only_change_map);

  // Preserve value, receiver and target_map across the loop below.
  __ push(eax);
  __ push(edx);
  __ push(ebx);

  __ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));

  // Allocate new FixedArray.
  // ebx: length of source FixedDoubleArray (smi-tagged)
  __ lea(edi, Operand(ebx, times_2, FixedArray::kHeaderSize));
  __ Allocate(edi, eax, esi, no_reg, &gc_required, TAG_OBJECT);

  // eax: destination FixedArray
  // ebx: number of elements
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(eax, FixedArray::kLengthOffset), ebx);
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));

  // Allocating heap numbers in the loop below can fail and cause a jump to
  // gc_required. We can't leave a partly initialized FixedArray behind,
  // so pessimistically fill it with holes now.
  Label initialization_loop, initialization_loop_entry;
  __ jmp(&initialization_loop_entry, Label::kNear);
  __ bind(&initialization_loop);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize),
         masm->isolate()->factory()->the_hole_value());
  __ bind(&initialization_loop_entry);
  __ sub(ebx, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &initialization_loop);

  // Reload the (smi-tagged) length; ebx was consumed as the loop counter.
  __ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));
  __ jmp(&entry);

  // ebx: target map
  // edx: receiver
  // Set transitioned map.
  __ bind(&only_change_map);
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx,
                      HeapObject::kMapOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ jmp(&success);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ pop(ebx);
  __ pop(edx);
  __ pop(eax);
  __ jmp(fail);

  // Box doubles into heap numbers.
  // edi: source FixedDoubleArray
  // eax: destination FixedArray
  __ bind(&loop);
  // ebx: index of current element (smi-tagged)
  // Detect the hole by comparing the upper 32 bits of the double against the
  // hole NaN signature.
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ cmp(FieldOperand(edi, ebx, times_4, offset), Immediate(kHoleNanUpper32));
  __ j(equal, &convert_hole);

  // Non-hole double, copy value into a heap number.
  __ AllocateHeapNumber(edx, esi, no_reg, &gc_required);
  // edx: new heap number
  __ movsd(xmm0,
           FieldOperand(edi, ebx, times_4, FixedDoubleArray::kHeaderSize));
  __ movsd(FieldOperand(edx, HeapNumber::kValueOffset), xmm0);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize), edx);
  __ mov(esi, ebx);
  __ RecordWriteArray(eax,
                      edx,
                      esi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ jmp(&entry, Label::kNear);

  // Replace the-hole NaN with the-hole pointer.
  __ bind(&convert_hole);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize),
         masm->isolate()->factory()->the_hole_value());

  // Loop entry: iterate ebx (smi-tagged index) down to zero.
  __ bind(&entry);
  __ sub(ebx, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &loop);

  __ pop(ebx);
  __ pop(edx);
  // ebx: target map
  // edx: receiver
  // Set transitioned map.
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx,
                      HeapObject::kMapOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // Replace receiver's backing store with newly created and filled FixedArray.
  __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
  __ RecordWriteField(edx,
                      JSObject::kElementsOffset,
                      eax,
                      edi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Restore registers.
  __ pop(eax);
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  __ bind(&success);
}
825
826
// Emits code that loads the character at position |index| of |string| into
// |result|. NOTE(review): |index| appears to be an untagged integer -- it is
// scaled directly with times_1/times_2 below -- confirm against callers.
// |string|, |index| and |result| are clobbered. Jumps to |call_runtime| for
// shapes that cannot be handled inline (unflattened cons strings and short
// external strings).
void StringCharLoadGenerator::Generate(MacroAssembler* masm,
                                       Factory* factory,
                                       Register string,
                                       Register index,
                                       Register result,
                                       Label* call_runtime) {
  // Fetch the instance type of the receiver into result register.
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ test(result, Immediate(kIsIndirectStringMask));
  __ j(zero, &check_sequential, Label::kNear);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  __ test(result, Immediate(kSlicedNotConsMask));
  __ j(zero, &cons_string, Label::kNear);

  // Handle slices: adjust the index by the slice offset and continue with
  // the parent string.
  Label indirect_string_loaded;
  __ mov(result, FieldOperand(string, SlicedString::kOffsetOffset));
  __ SmiUntag(result);
  __ add(index, result);
  __ mov(string, FieldOperand(string, SlicedString::kParentOffset));
  __ jmp(&indirect_string_loaded, Label::kNear);

  // Handle cons strings.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ cmp(FieldOperand(string, ConsString::kSecondOffset),
         Immediate(factory->empty_string()));
  __ j(not_equal, call_runtime);
  __ mov(string, FieldOperand(string, ConsString::kFirstOffset));

  // Reload the instance type of the (possibly new) underlying string.
  __ bind(&indirect_string_loaded);
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // Distinguish sequential and external strings. Only these two string
  // representations can reach here (slices and flat cons strings have been
  // reduced to the underlying sequential or external string).
  Label seq_string;
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test(result, Immediate(kStringRepresentationMask));
  __ j(zero, &seq_string, Label::kNear);

  // Handle external strings.
  Label one_byte_external, done;
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ test(result, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ test_b(result, kShortExternalStringMask);
  __ j(not_zero, call_runtime);
  // Check encoding.
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ test_b(result, kStringEncodingMask);
  // The mov below does not affect the flags set by test_b above, so the
  // conditional jump after it still dispatches on the encoding bit.
  __ mov(result, FieldOperand(string, ExternalString::kResourceDataOffset));
  __ j(not_equal, &one_byte_external, Label::kNear);
  // Two-byte string.
  __ movzx_w(result, Operand(result, index, times_2, 0));
  __ jmp(&done, Label::kNear);
  __ bind(&one_byte_external);
  // One-byte string.
  __ movzx_b(result, Operand(result, index, times_1, 0));
  __ jmp(&done, Label::kNear);

  // Dispatch on the encoding: one-byte or two-byte.
  Label one_byte;
  __ bind(&seq_string);
  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
  STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
  __ test(result, Immediate(kStringEncodingMask));
  __ j(not_zero, &one_byte, Label::kNear);

  // Two-byte string.
  // Load the two-byte character code into the result register.
  __ movzx_w(result, FieldOperand(string,
                                  index,
                                  times_2,
                                  SeqTwoByteString::kHeaderSize));
  __ jmp(&done, Label::kNear);

  // One-byte string.
  // Load the byte into the result register.
  __ bind(&one_byte);
  __ movzx_b(result, FieldOperand(string,
                                  index,
                                  times_1,
                                  SeqOneByteString::kHeaderSize));
  __ bind(&done);
}
929
930
931static Operand ExpConstant(int index) {
932 return Operand::StaticVariable(ExternalReference::math_exp_constants(index));
933}
934
935
// Emits code that computes exp(|input|) into |result| using a table-driven
// approximation: fixed constants come from the math_exp_constants table and
// a 2048-entry log table (see the 0x7ff mask below) supplies the fractional
// power-of-two factor. |input| and |double_scratch| are clobbered; |temp1|
// and |temp2| are integer scratch registers. Register aliasing constraints
// are DCHECKed up front.
void MathExpGenerator::EmitMathExp(MacroAssembler* masm,
                                   XMMRegister input,
                                   XMMRegister result,
                                   XMMRegister double_scratch,
                                   Register temp1,
                                   Register temp2) {
  DCHECK(!input.is(double_scratch));
  DCHECK(!input.is(result));
  DCHECK(!result.is(double_scratch));
  DCHECK(!temp1.is(temp2));
  DCHECK(ExternalReference::math_exp_constants(0).address() != NULL);
  DCHECK(!masm->serializer_enabled());  // External references not serializable.

  Label done;

  // Range checks: inputs at or below ExpConstant(0) produce 0 (result was
  // xorpd'ed), inputs at or above ExpConstant(1) produce ExpConstant(2).
  // NOTE(review): presumably these are the underflow/overflow bounds of exp
  // and ExpConstant(2) is +infinity -- confirm against the table definition.
  __ movsd(double_scratch, ExpConstant(0));
  __ xorpd(result, result);
  __ ucomisd(double_scratch, input);
  __ j(above_equal, &done);
  __ ucomisd(input, ExpConstant(1));
  __ movsd(result, ExpConstant(2));
  __ j(above_equal, &done);
  // Main path: scale the input (ExpConstant(3)/(4)), split off an integer
  // part into temp2, and evaluate a polynomial on the remainder.
  __ movsd(double_scratch, ExpConstant(3));
  __ movsd(result, ExpConstant(4));
  __ mulsd(double_scratch, input);
  __ addsd(double_scratch, result);
  __ movd(temp2, double_scratch);
  __ subsd(double_scratch, result);
  __ movsd(result, ExpConstant(6));
  __ mulsd(double_scratch, ExpConstant(5));
  __ subsd(double_scratch, input);
  __ subsd(result, double_scratch);
  __ movsd(input, double_scratch);
  __ mulsd(input, double_scratch);
  __ mulsd(result, input);
  __ mov(temp1, temp2);
  __ mulsd(result, ExpConstant(7));
  __ subsd(result, double_scratch);
  // Reconstruct the power-of-two factor: temp1 (shr 11, shl 20) appears to
  // become the exponent field in the high word of a double, while
  // temp2 & 0x7ff indexes the 2048-entry log table of fractional factors.
  __ add(temp1, Immediate(0x1ff800));
  __ addsd(result, ExpConstant(8));
  __ and_(temp2, Immediate(0x7ff));
  __ shr(temp1, 11);
  __ shl(temp1, 20);
  __ movd(input, temp1);
  // Move the exponent bits into the high 32 bits of the low double lane.
  __ pshufd(input, input, static_cast<uint8_t>(0xe1));  // Order: 11 10 00 01
  __ movsd(double_scratch, Operand::StaticArray(
      temp2, times_8, ExternalReference::math_exp_log_table()));
  __ orps(input, double_scratch);
  __ mulsd(result, input);
  __ bind(&done);
}
987
988#undef __
989
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000990
// Pre-assembles the "young" code-age sequence for ia32: the standard
// function prologue (push ebp; mov ebp, esp; push esi; push edi). Aged code
// has this prologue replaced by a call to an age stub; see
// Code::PatchPlatformCodeAge below.
CodeAgingHelper::CodeAgingHelper(Isolate* isolate) {
  USE(isolate);
  DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
  // The patcher assembles directly into the young_sequence_ buffer.
  CodePatcher patcher(isolate, young_sequence_.start(),
                      young_sequence_.length());
  patcher.masm()->push(ebp);
  patcher.masm()->mov(ebp, esp);
  patcher.masm()->push(esi);
  patcher.masm()->push(edi);
}
1001
1002
#ifdef DEBUG
// A sequence is "old" iff it starts with the call opcode that
// Code::PatchPlatformCodeAge writes when patching in an age stub.
bool CodeAgingHelper::IsOld(byte* candidate) const {
  return *candidate == kCallOpcode;
}
#endif
1008
1009
1010bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
1011 bool result = isolate->code_aging_helper()->IsYoung(sequence);
1012 DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
1013 return result;
1014}
1015
1016
// Decodes the age and marking parity encoded in a code object's prologue.
// A still-young sequence means "no age". Otherwise the prologue is a call
// to an age stub, and the identity of that stub encodes the age/parity
// pair, which the Code* overload of GetCodeAgeAndParity recovers.
void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age,
                               MarkingParity* parity) {
  if (IsYoungSequence(isolate, sequence)) {
    *age = kNoAgeCodeAge;
    *parity = NO_MARKING_PARITY;
  } else {
    sequence++;  // Skip the kCallOpcode byte
    // The 32-bit operand of an ia32 call is relative to the end of the
    // instruction, hence the kCallTargetAddressOffset adjustment.
    Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
                             Assembler::kCallTargetAddressOffset;
    Code* stub = GetCodeFromTargetAddress(target_address);
    GetCodeAgeAndParity(stub, age, parity);
  }
}
1030
1031
// Rewrites the prologue at |sequence| for the requested age: restores the
// pristine young prologue for kNoAgeCodeAge, or patches in a call to the
// age stub matching |age|/|parity| otherwise.
void Code::PatchPlatformCodeAge(Isolate* isolate,
                                byte* sequence,
                                Code::Age age,
                                MarkingParity parity) {
  uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
  if (age == kNoAgeCodeAge) {
    isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
    // The raw memcpy-style restore bypasses the assembler, so flush the
    // instruction cache explicitly.
    Assembler::FlushICache(isolate, sequence, young_length);
  } else {
    Code* stub = GetCodeAgeStub(isolate, age, parity);
    // CodePatcher flushes the icache when it goes out of scope.
    CodePatcher patcher(isolate, sequence, young_length);
    patcher.masm()->call(stub->instruction_start(), RelocInfo::NONE32);
  }
}
1046
1047
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001048} // namespace internal
1049} // namespace v8
Leon Clarkef7060e22010-06-03 12:02:55 +01001050
1051#endif // V8_TARGET_ARCH_IA32